diff --git a/.env.example b/.env.example index a0d38f51c..f443818b3 100644 --- a/.env.example +++ b/.env.example @@ -34,5 +34,14 @@ INFOQUEST_API_KEY=your-infoquest-api-key # GitHub API Token # GITHUB_TOKEN=your-github-token + +# Database (only needed when config.yaml has database.backend: postgres) +# DATABASE_URL=postgresql://deerflow:password@localhost:5432/deerflow +# # WECOM_BOT_ID=your-wecom-bot-id # WECOM_BOT_SECRET=your-wecom-bot-secret +# DINGTALK_CLIENT_ID=your-dingtalk-client-id +# DINGTALK_CLIENT_SECRET=your-dingtalk-client-secret + +# Set to "false" to disable Swagger UI, ReDoc, and OpenAPI schema in production +# GATEWAY_ENABLE_DOCS=false diff --git a/.gitignore b/.gitignore index 4e46d2e71..0076848e0 100644 --- a/.gitignore +++ b/.gitignore @@ -40,6 +40,7 @@ coverage/ skills/custom/* logs/ log/ +debug.log # Local git hooks (keep only on this machine, do not push) .githooks/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..c79d53b51 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,33 @@ +repos: + # Backend: ruff lint + format via uv (uses the same ruff version as backend deps) + - repo: local + hooks: + - id: ruff + name: ruff lint + entry: bash -c 'cd backend && uv run ruff check --fix "${@/#backend\//}"' -- + language: system + types_or: [python] + files: ^backend/ + - id: ruff-format + name: ruff format + entry: bash -c 'cd backend && uv run ruff format "${@/#backend\//}"' -- + language: system + types_or: [python] + files: ^backend/ + + # Frontend: eslint + prettier (must run from frontend/ for node_modules resolution) + - repo: local + hooks: + - id: frontend-eslint + name: eslint (frontend) + entry: bash -c 'cd frontend && npx eslint --fix "${@/#frontend\//}"' -- + language: system + types_or: [javascript, tsx, ts] + files: ^frontend/ + + - id: frontend-prettier + name: prettier (frontend) + entry: bash -c 'cd frontend && npx prettier --write "${@/#frontend\//}"' -- + language: system + files: ^frontend/ + types_or: [javascript, tsx, ts, json, css] diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 241ca71af..b7cb2840b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -166,7 +166,7 @@ Required tools: 1. **Configure the application** (same as Docker setup above) -2. **Install dependencies**: +2. 
**Install dependencies** (this also sets up pre-commit hooks): ```bash make install ``` diff --git a/Makefile b/Makefile index b21d860ae..c60d9b9b2 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ # DeerFlow - Unified Development Environment -.PHONY: help config config-upgrade check install setup doctor dev dev-pro dev-daemon dev-daemon-pro start start-pro start-daemon start-daemon-pro stop up up-pro down clean docker-init docker-start docker-start-pro docker-stop docker-logs docker-logs-frontend docker-logs-gateway +.PHONY: help config config-upgrade check install setup doctor dev dev-daemon start start-daemon stop up down clean docker-init docker-start docker-stop docker-logs docker-logs-frontend docker-logs-gateway BASH ?= bash BACKEND_UV_RUN = cd backend && uv run @@ -23,28 +23,22 @@ help: @echo " make config - Generate local config files (aborts if config already exists)" @echo " make config-upgrade - Merge new fields from config.example.yaml into config.yaml" @echo " make check - Check if all required tools are installed" - @echo " make install - Install all dependencies (frontend + backend)" + @echo " make install - Install all dependencies (frontend + backend + pre-commit hooks)" @echo " make setup-sandbox - Pre-pull sandbox container image (recommended)" @echo " make dev - Start all services in development mode (with hot-reloading)" - @echo " make dev-pro - Start in dev + Gateway mode (experimental, no LangGraph server)" @echo " make dev-daemon - Start dev services in background (daemon mode)" - @echo " make dev-daemon-pro - Start dev daemon + Gateway mode (experimental)" @echo " make start - Start all services in production mode (optimized, no hot-reloading)" - @echo " make start-pro - Start in prod + Gateway mode (experimental)" @echo " make start-daemon - Start prod services in background (daemon mode)" - @echo " make start-daemon-pro - Start prod daemon + Gateway mode (experimental)" @echo " make stop - Stop all running services" @echo " make clean - Clean up processes and temporary files" @echo "" @echo "Docker Production Commands:" @echo " make up - Build and start production Docker services (localhost:2026)" - @echo " make up-pro - Build and start production Docker in Gateway mode (experimental)" @echo " make down - Stop and remove production Docker containers" @echo "" @echo "Docker Development Commands:" @echo " make docker-init - Pull the sandbox image" @echo " make docker-start - Start Docker services (mode-aware from config.yaml, localhost:2026)" - @echo " make docker-start-pro - Start Docker in Gateway mode (experimental, no LangGraph container)" @echo " make docker-stop - Stop Docker development services" @echo " make docker-logs - View Docker development logs" @echo " make docker-logs-frontend - View Docker frontend logs" @@ -73,6 +67,8 @@ install: @cd backend && uv sync @echo "Installing frontend dependencies..." @cd frontend && pnpm install + @echo "Installing pre-commit hooks..." 
+ @$(BACKEND_UV_RUN) --with pre-commit pre-commit install @echo "✓ All dependencies installed" @echo "" @echo "==========================================" @@ -121,41 +117,21 @@ dev: @$(PYTHON) ./scripts/check.py @$(RUN_WITH_GIT_BASH) ./scripts/serve.sh --dev -# Start all services in dev + Gateway mode (experimental: agent runtime embedded in Gateway) -dev-pro: - @$(PYTHON) ./scripts/check.py - @$(RUN_WITH_GIT_BASH) ./scripts/serve.sh --dev --gateway - # Start all services in production mode (with optimizations) start: @$(PYTHON) ./scripts/check.py @$(RUN_WITH_GIT_BASH) ./scripts/serve.sh --prod -# Start all services in prod + Gateway mode (experimental) -start-pro: - @$(PYTHON) ./scripts/check.py - @$(RUN_WITH_GIT_BASH) ./scripts/serve.sh --prod --gateway - # Start all services in daemon mode (background) dev-daemon: @$(PYTHON) ./scripts/check.py @$(RUN_WITH_GIT_BASH) ./scripts/serve.sh --dev --daemon -# Start daemon + Gateway mode (experimental) -dev-daemon-pro: - @$(PYTHON) ./scripts/check.py - @$(RUN_WITH_GIT_BASH) ./scripts/serve.sh --dev --gateway --daemon - # Start prod services in daemon mode (background) start-daemon: @$(PYTHON) ./scripts/check.py @$(RUN_WITH_GIT_BASH) ./scripts/serve.sh --prod --daemon -# Start prod daemon + Gateway mode (experimental) -start-daemon-pro: - @$(PYTHON) ./scripts/check.py - @$(RUN_WITH_GIT_BASH) ./scripts/serve.sh --prod --gateway --daemon - # Stop all services stop: @$(RUN_WITH_GIT_BASH) ./scripts/serve.sh --stop @@ -180,10 +156,6 @@ docker-init: docker-start: @$(RUN_WITH_GIT_BASH) ./scripts/docker.sh start -# Start Docker in Gateway mode (experimental) -docker-start-pro: - @$(RUN_WITH_GIT_BASH) ./scripts/docker.sh start --gateway - # Stop Docker development environment docker-stop: @$(RUN_WITH_GIT_BASH) ./scripts/docker.sh stop @@ -206,10 +178,6 @@ docker-logs-gateway: up: @$(RUN_WITH_GIT_BASH) ./scripts/deploy.sh -# Build and start production services in Gateway mode -up-pro: - @$(RUN_WITH_GIT_BASH) ./scripts/deploy.sh --gateway - # Stop and remove production containers down: @$(RUN_WITH_GIT_BASH) ./scripts/deploy.sh down diff --git a/README.md b/README.md index e9ca2c174..c67fdc005 100644 --- a/README.md +++ b/README.md @@ -243,9 +243,6 @@ make up # Build images and start all production services make down # Stop and remove containers ``` -> [!NOTE] -> The LangGraph agent server currently runs via `langgraph dev` (the open-source CLI server). - Access: http://localhost:2026 See [CONTRIBUTING.md](CONTRIBUTING.md) for detailed Docker development guide. @@ -264,7 +261,7 @@ On Windows, run the local development flow from Git Bash. Native `cmd.exe` and P 2. **Install dependencies**: ```bash - make install # Install backend + frontend dependencies + make install # Install backend + frontend dependencies + pre-commit hooks ``` 3. **(Optional) Pre-pull sandbox image**: @@ -289,53 +286,31 @@ On Windows, run the local development flow from Git Bash. Native `cmd.exe` and P #### Startup Modes -DeerFlow supports multiple startup modes across two dimensions: - -- **Dev / Prod** — dev enables hot-reload; prod uses pre-built frontend -- **Standard / Gateway** — standard uses a separate LangGraph server (4 processes); Gateway mode (experimental) embeds the agent runtime in the Gateway API (3 processes) +DeerFlow runs the agent runtime inside the Gateway API. Development mode enables hot-reload; production mode uses a pre-built frontend. 
| | **Local Foreground** | **Local Daemon** | **Docker Dev** | **Docker Prod** | |---|---|---|---|---| | **Dev** | `./scripts/serve.sh --dev`<br>`make dev` | `./scripts/serve.sh --dev --daemon`<br>`make dev-daemon` | `./scripts/docker.sh start`<br>`make docker-start` | — | -| **Dev + Gateway** | `./scripts/serve.sh --dev --gateway`<br>`make dev-pro` | `./scripts/serve.sh --dev --gateway --daemon`<br>`make dev-daemon-pro` | `./scripts/docker.sh start --gateway`<br>`make docker-start-pro` | — | | **Prod** | `./scripts/serve.sh --prod`<br>`make start` | `./scripts/serve.sh --prod --daemon`<br>`make start-daemon` | — | `./scripts/deploy.sh`<br>`make up` | -| **Prod + Gateway** | `./scripts/serve.sh --prod --gateway`<br>`make start-pro` | `./scripts/serve.sh --prod --gateway --daemon`<br>`make start-daemon-pro` | — | `./scripts/deploy.sh --gateway`<br>`make up-pro` | | Action | Local | Docker Dev | Docker Prod | |---|---|---|---| | **Stop** | `./scripts/serve.sh --stop`<br>`make stop` | `./scripts/docker.sh stop`<br>`make docker-stop` | `./scripts/deploy.sh down`<br>`make down` | | **Restart** | `./scripts/serve.sh --restart [flags]` | `./scripts/docker.sh restart` | — | -> **Gateway mode** eliminates the LangGraph server process — the Gateway API handles agent execution directly via async tasks, managing its own concurrency. - -#### Why Gateway Mode? - -In standard mode, DeerFlow runs a dedicated [LangGraph Platform](https://langchain-ai.github.io/langgraph/) server alongside the Gateway API. This architecture works well but has trade-offs: - -| | Standard Mode | Gateway Mode | -|---|---|---| -| **Architecture** | Gateway (REST API) + LangGraph (agent runtime) | Gateway embeds agent runtime | -| **Concurrency** | `--n-jobs-per-worker` per worker (requires license) | `--workers` × async tasks (no per-worker cap) | -| **Containers / Processes** | 4 (frontend, gateway, langgraph, nginx) | 3 (frontend, gateway, nginx) | -| **Resource usage** | Higher (two Python runtimes) | Lower (single Python runtime) | -| **LangGraph Platform license** | Required for production images | Not required | -| **Cold start** | Slower (two services to initialize) | Faster | - -Both modes are functionally equivalent — the same agents, tools, and skills work in either mode. +Nginx exposes Gateway's embedded agent runtime at the public LangGraph-compatible path `/api/langgraph/*` and rewrites those requests to Gateway's native `/api/*` routers. #### Docker Production Deployment -`deploy.sh` supports building and starting separately. Images are mode-agnostic — runtime mode is selected at start time: +`deploy.sh` supports building and starting separately: ```bash # One-step (build + start) -deploy.sh # standard mode (default) -deploy.sh --gateway # gateway mode +deploy.sh -# Two-step (build once, start with any mode) +# Two-step (build once, start later) deploy.sh build # build all images -deploy.sh start # start in standard mode -deploy.sh start --gateway # start in gateway mode +deploy.sh start # start pre-built images # Stop deploy.sh down @@ -370,13 +345,14 @@ DeerFlow supports receiving tasks from messaging apps. Channels auto-start when | Feishu / Lark | WebSocket | Moderate | | WeChat | Tencent iLink (long-polling) | Moderate | | WeCom | WebSocket | Moderate | +| DingTalk | Stream Push (WebSocket) | Moderate | **Configuration in `config.yaml`:** ```yaml channels: - # LangGraph Server URL (default: http://localhost:2024) - langgraph_url: http://localhost:2024 + # LangGraph-compatible Gateway API base URL (default: http://localhost:8001/api) + langgraph_url: http://localhost:8001/api # Gateway API URL (default: http://localhost:8001) gateway_url: http://localhost:8001 @@ -439,11 +415,19 @@ channels: context: thinking_enabled: true subagent_enabled: true + + dingtalk: + enabled: true + client_id: $DINGTALK_CLIENT_ID # Client ID of your DingTalk application + client_secret: $DINGTALK_CLIENT_SECRET # Client Secret of your DingTalk application + allowed_users: [] # empty = allow all + card_template_id: "" # Optional: AI Card template ID for streaming typewriter effect ``` Notes: - `assistant_id: lead_agent` calls the default LangGraph assistant directly. - If `assistant_id` is set to a custom agent name, DeerFlow still routes through `lead_agent` and injects that value as `agent_name`, so the custom agent's SOUL/config takes effect for IM channels. +- IM channel workers call Gateway's LangGraph-compatible API internally and automatically attach process-local internal auth plus the CSRF cookie/header pair required for thread and run creation (see the sketch below).
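+
+A minimal sketch of calling that same LangGraph-compatible API from outside the channel workers (assumes a local no-auth dev setup; with auth enabled, state-changing calls also need the CSRF cookie/header pair described above):
+
+```python
+import asyncio
+
+from langgraph_sdk import get_client
+
+
+async def main() -> None:
+    # Gateway's LangGraph-compatible base URL (the same default the channels use).
+    client = get_client(url="http://localhost:8001/api")
+    thread = await client.threads.create()
+    # runs.wait blocks until the run finishes and returns the final state.
+    result = await client.runs.wait(
+        thread["thread_id"],
+        "lead_agent",  # default assistant
+        input={"messages": [{"role": "user", "content": "Hello DeerFlow"}]},
+    )
+    print(result)
+
+
+asyncio.run(main())
+```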
Set the corresponding API keys in your `.env` file: @@ -466,6 +450,10 @@ WECHAT_ILINK_BOT_ID=your_ilink_bot_id # WeCom WECOM_BOT_ID=your_bot_id WECOM_BOT_SECRET=your_bot_secret + +# DingTalk +DINGTALK_CLIENT_ID=your_client_id +DINGTALK_CLIENT_SECRET=your_client_secret ``` **Telegram Setup** @@ -504,7 +492,15 @@ WECOM_BOT_SECRET=your_bot_secret 4. Make sure backend dependencies include `wecom-aibot-python-sdk`. The channel uses a WebSocket long connection and does not require a public callback URL. 5. The current integration supports inbound text, image, and file messages. Final images/files generated by the agent are also sent back to the WeCom conversation. -When DeerFlow runs in Docker Compose, IM channels execute inside the `gateway` container. In that case, do not point `channels.langgraph_url` or `channels.gateway_url` at `localhost`; use container service names such as `http://langgraph:2024` and `http://gateway:8001`, or set `DEER_FLOW_CHANNELS_LANGGRAPH_URL` and `DEER_FLOW_CHANNELS_GATEWAY_URL`. +**DingTalk Setup** + +1. Create a DingTalk application in the [DingTalk Developer Console](https://open.dingtalk.com/) and enable **Robot** capability. +2. Set the message receiving mode to **Stream Mode** in the robot configuration page. +3. Copy the `Client ID` and `Client Secret`, set `DINGTALK_CLIENT_ID` and `DINGTALK_CLIENT_SECRET` in `.env`, and enable the channel in `config.yaml`. +4. *(Optional)* To enable streaming AI Card replies (typewriter effect), create an **AI Card** template on the [DingTalk Card Platform](https://open.dingtalk.com/document/dingstart/typewriter-effect-streaming-ai-card), then set `card_template_id` in `config.yaml` to the template ID. You also need to apply for the `Card.Streaming.Write` and `Card.Instance.Write` permissions. + + +When DeerFlow runs in Docker Compose, IM channels execute inside the `gateway` container. In that case, do not point `channels.langgraph_url` or `channels.gateway_url` at `localhost`; use container service names such as `http://gateway:8001/api` and `http://gateway:8001`, or set `DEER_FLOW_CHANNELS_LANGGRAPH_URL` and `DEER_FLOW_CHANNELS_GATEWAY_URL`. **Commands** diff --git a/README_fr.md b/README_fr.md index e7684a5f8..3b8dc3d41 100644 --- a/README_fr.md +++ b/README_fr.md @@ -290,6 +290,7 @@ DeerFlow peut recevoir des tâches depuis des applications de messagerie. Les ca | Telegram | Bot API (long-polling) | Facile | | Slack | Socket Mode | Modérée | | Feishu / Lark | WebSocket | Modérée | +| DingTalk | Stream Push (WebSocket) | Modérée | **Configuration dans `config.yaml` :** @@ -341,6 +342,13 @@ channels: context: thinking_enabled: true subagent_enabled: true + + dingtalk: + enabled: true + client_id: $DINGTALK_CLIENT_ID # ClientId depuis DingTalk Open Platform + client_secret: $DINGTALK_CLIENT_SECRET # ClientSecret depuis DingTalk Open Platform + allowed_users: [] # vide = tout le monde autorisé + card_template_id: "" # Optionnel : ID de modèle AI Card pour l'effet machine à écrire en streaming ``` Définissez les clés API correspondantes dans votre fichier `.env` : @@ -356,6 +364,10 @@ SLACK_APP_TOKEN=xapp-... # Feishu / Lark FEISHU_APP_ID=cli_xxxx FEISHU_APP_SECRET=your_app_secret + +# DingTalk +DINGTALK_CLIENT_ID=your_client_id +DINGTALK_CLIENT_SECRET=your_client_secret ``` **Configuration Telegram** @@ -378,6 +390,13 @@ FEISHU_APP_SECRET=your_app_secret 3. Dans **Events**, abonnez-vous à `im.message.receive_v1` et sélectionnez le mode **Long Connection**. 4. Copiez l'App ID et l'App Secret. 
Définissez `FEISHU_APP_ID` et `FEISHU_APP_SECRET` dans `.env` et activez le canal dans `config.yaml`. +**Configuration DingTalk** + +1. Créez une application sur [DingTalk Open Platform](https://open.dingtalk.com/) et activez la capacité **Robot**. +2. Dans la page de configuration du robot, définissez le mode de réception des messages sur **Stream**. +3. Copiez le `Client ID` et le `Client Secret`. Définissez `DINGTALK_CLIENT_ID` et `DINGTALK_CLIENT_SECRET` dans `.env` et activez le canal dans `config.yaml`. +4. *(Optionnel)* Pour activer les réponses en streaming AI Card (effet machine à écrire), créez un modèle **AI Card** sur la [plateforme de cartes DingTalk](https://open.dingtalk.com/document/dingstart/typewriter-effect-streaming-ai-card), puis définissez `card_template_id` dans `config.yaml` avec l'ID du modèle. Vous devez également demander les permissions `Card.Streaming.Write` et `Card.Instance.Write`. + **Commandes** Une fois un canal connecté, vous pouvez interagir avec DeerFlow directement depuis le chat : diff --git a/README_ja.md b/README_ja.md index 3e0ff4c85..d2ba81750 100644 --- a/README_ja.md +++ b/README_ja.md @@ -243,6 +243,7 @@ DeerFlowはメッセージングアプリからのタスク受信をサポート | Telegram | Bot API(ロングポーリング) | 簡単 | | Slack | Socket Mode | 中程度 | | Feishu / Lark | WebSocket | 中程度 | +| DingTalk | Stream Push(WebSocket) | 中程度 | **`config.yaml`での設定:** @@ -294,6 +295,13 @@ channels: context: thinking_enabled: true subagent_enabled: true + + dingtalk: + enabled: true + client_id: $DINGTALK_CLIENT_ID # DingTalk Open PlatformのClientId + client_secret: $DINGTALK_CLIENT_SECRET # DingTalk Open PlatformのClientSecret + allowed_users: [] # 空 = 全員許可 + card_template_id: "" # オプション:ストリーミングタイプライター効果用のAIカードテンプレートID ``` 対応するAPIキーを`.env`ファイルに設定します: @@ -309,6 +317,10 @@ SLACK_APP_TOKEN=xapp-... # Feishu / Lark FEISHU_APP_ID=cli_xxxx FEISHU_APP_SECRET=your_app_secret + +# DingTalk +DINGTALK_CLIENT_ID=your_client_id +DINGTALK_CLIENT_SECRET=your_client_secret ``` **Telegramのセットアップ** @@ -331,6 +343,13 @@ FEISHU_APP_SECRET=your_app_secret 3. **イベント**で`im.message.receive_v1`を購読し、**ロングコネクション**モードを選択。 4. App IDとApp Secretをコピー。`.env`に`FEISHU_APP_ID`と`FEISHU_APP_SECRET`を設定し、`config.yaml`でチャネルを有効にします。 +**DingTalkのセットアップ** + +1. [DingTalk Open Platform](https://open.dingtalk.com/)でアプリを作成し、**ロボット**機能を有効化します。 +2. ロボット設定ページでメッセージ受信モードを**Streamモード**に設定します。 +3. `Client ID`と`Client Secret`をコピー。`.env`に`DINGTALK_CLIENT_ID`と`DINGTALK_CLIENT_SECRET`を設定し、`config.yaml`でチャネルを有効にします。 +4. 
*(オプション)* ストリーミングAIカード返信(タイプライター効果)を有効にするには、[DingTalkカードプラットフォーム](https://open.dingtalk.com/document/dingstart/typewriter-effect-streaming-ai-card)で**AIカード**テンプレートを作成し、`config.yaml`の`card_template_id`にテンプレートIDを設定します。`Card.Streaming.Write` および `Card.Instance.Write` 権限の申請も必要です。 + **コマンド** チャネル接続後、チャットから直接DeerFlowと対話できます: diff --git a/README_ru.md b/README_ru.md index 6ee30ebc6..e74feaeac 100644 --- a/README_ru.md +++ b/README_ru.md @@ -256,6 +256,7 @@ DeerFlow принимает задачи прямо из мессенджеро | Telegram | Bot API (long-polling) | Просто | | Slack | Socket Mode | Средне | | Feishu / Lark | WebSocket | Средне | +| DingTalk | Stream Push (WebSocket) | Средне | **Конфигурация в `config.yaml`:** @@ -278,6 +279,13 @@ channels: enabled: true bot_token: $TELEGRAM_BOT_TOKEN allowed_users: [] + + dingtalk: + enabled: true + client_id: $DINGTALK_CLIENT_ID # ClientId с DingTalk Open Platform + client_secret: $DINGTALK_CLIENT_SECRET # ClientSecret с DingTalk Open Platform + allowed_users: [] # пусто = разрешить всем + card_template_id: "" # Опционально: ID шаблона AI Card для потокового эффекта печатной машинки ``` **Настройка Telegram** @@ -285,6 +293,13 @@ channels: 1. Напишите [@BotFather](https://t.me/BotFather), отправьте `/newbot` и скопируйте HTTP API-токен. 2. Укажите `TELEGRAM_BOT_TOKEN` в `.env` и включите канал в `config.yaml`. +**Настройка DingTalk** + +1. Создайте приложение на [DingTalk Open Platform](https://open.dingtalk.com/) и включите возможность **Робот**. +2. На странице настроек робота установите режим приёма сообщений на **Stream**. +3. Скопируйте `Client ID` и `Client Secret`. Укажите `DINGTALK_CLIENT_ID` и `DINGTALK_CLIENT_SECRET` в `.env` и включите канал в `config.yaml`. +4. *(Опционально)* Для включения потоковых ответов AI Card (эффект печатной машинки) создайте шаблон **AI Card** на [платформе карточек DingTalk](https://open.dingtalk.com/document/dingstart/typewriter-effect-streaming-ai-card), затем укажите `card_template_id` в `config.yaml` с ID шаблона. Также необходимо запросить разрешения `Card.Streaming.Write` и `Card.Instance.Write`. + **Доступные команды** | Команда | Описание | diff --git a/README_zh.md b/README_zh.md index f6043ff86..6e4a618c7 100644 --- a/README_zh.md +++ b/README_zh.md @@ -248,6 +248,7 @@ DeerFlow 支持从即时通讯应用接收任务。只要配置完成,对应 | Slack | Socket Mode | 中等 | | Feishu / Lark | WebSocket | 中等 | | 企业微信智能机器人 | WebSocket | 中等 | +| 钉钉 | Stream Push(WebSocket) | 中等 | **`config.yaml` 中的配置示例:** @@ -304,6 +305,13 @@ channels: context: thinking_enabled: true subagent_enabled: true + + dingtalk: + enabled: true + client_id: $DINGTALK_CLIENT_ID # 钉钉开放平台 ClientId + client_secret: $DINGTALK_CLIENT_SECRET # 钉钉开放平台 ClientSecret + allowed_users: [] # 留空表示允许所有人 + card_template_id: "" # 可选:AI 卡片模板 ID,用于流式打字机效果 ``` 说明: @@ -327,6 +335,10 @@ FEISHU_APP_SECRET=your_app_secret # 企业微信智能机器人 WECOM_BOT_ID=your_bot_id WECOM_BOT_SECRET=your_bot_secret + +# 钉钉 +DINGTALK_CLIENT_ID=your_client_id +DINGTALK_CLIENT_SECRET=your_client_secret ``` **Telegram 配置** @@ -357,6 +369,13 @@ WECOM_BOT_SECRET=your_bot_secret 4. 安装后端依赖时确保包含 `wecom-aibot-python-sdk`,渠道会通过 WebSocket 长连接接收消息,无需公网回调地址。 5. 当前支持文本、图片和文件入站消息;agent 生成的最终图片/文件也会回传到企业微信会话中。 +**钉钉配置** + +1. 在 [钉钉开放平台](https://open.dingtalk.com/) 创建应用,并启用 **机器人** 能力。 +2. 在机器人配置页面设置消息接收模式为 **Stream模式**。 +3. 复制 `Client ID` 和 `Client Secret`,在 `.env` 中设置 `DINGTALK_CLIENT_ID` 和 `DINGTALK_CLIENT_SECRET`,并在 `config.yaml` 中启用该渠道。 +4. 
*(可选)* 如需开启流式 AI 卡片回复(打字机效果),请在[钉钉卡片平台](https://open.dingtalk.com/document/dingstart/typewriter-effect-streaming-ai-card)创建 **AI 卡片**模板,然后在 `config.yaml` 中将 `card_template_id` 设为该模板 ID。同时需要申请 `Card.Streaming.Write` 和 `Card.Instance.Write` 权限。 + **命令** 渠道连接完成后,你可以直接在聊天窗口里和 DeerFlow 交互: diff --git a/backend/CLAUDE.md b/backend/CLAUDE.md index 7a2242d7e..b185ce4a1 100644 --- a/backend/CLAUDE.md +++ b/backend/CLAUDE.md @@ -7,15 +7,13 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co DeerFlow is a LangGraph-based AI super agent system with a full-stack architecture. The backend provides a "super agent" with sandbox execution, persistent memory, subagent delegation, and extensible tool integration - all operating in per-thread isolated environments. **Architecture**: -- **LangGraph Server** (port 2024): Agent runtime and workflow execution -- **Gateway API** (port 8001): REST API for models, MCP, skills, memory, artifacts, uploads, and local thread cleanup +- **Gateway API** (port 8001): REST API plus embedded LangGraph-compatible agent runtime - **Frontend** (port 3000): Next.js web interface - **Nginx** (port 2026): Unified reverse proxy entry point - **Provisioner** (port 8002, optional in Docker dev): Started only when sandbox is configured for provisioner/Kubernetes mode -**Runtime Modes**: -- **Standard mode** (`make dev`): LangGraph Server handles agent execution as a separate process. 4 processes total. -- **Gateway mode** (`make dev-pro`, experimental): Agent runtime embedded in Gateway via `RunManager` + `run_agent()` + `StreamBridge` (`packages/harness/deerflow/runtime/`). Service manages its own concurrency via async tasks. 3 processes total, no LangGraph Server. +**Runtime**: +- `make dev`, Docker dev, and production all run the agent runtime in Gateway via `RunManager` + `run_agent()` + `StreamBridge` (`packages/harness/deerflow/runtime/`). Nginx exposes that runtime at `/api/langgraph/*` and rewrites it to Gateway's native `/api/*` routers. 
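+
+A minimal sketch of that rewrite from a client's point of view (assumes the default local ports, nginx on 2026 and Gateway on 8001, and no-auth mode; with auth enabled these state-changing calls also require the CSRF cookie/header pair):
+
+```python
+import httpx
+
+# Public path: nginx rewrites /api/langgraph/* to Gateway's native /api/*.
+via_nginx = httpx.post("http://localhost:2026/api/langgraph/threads", json={})
+
+# Same endpoint, hitting Gateway's native router directly.
+direct = httpx.post("http://localhost:8001/api/threads", json={})
+
+# Both requests land on the same embedded runtime.
+print(via_nginx.status_code, direct.status_code)
+```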
**Project Structure**: ``` @@ -25,7 +23,7 @@ deer-flow/ ├── extensions_config.json # MCP servers and skills configuration ├── backend/ # Backend application (this directory) │ ├── Makefile # Backend-only commands (dev, gateway, lint) -│ ├── langgraph.json # LangGraph server configuration +│ ├── langgraph.json # LangGraph Studio graph configuration │ ├── packages/ │ │ └── harness/ # deerflow-harness package (import: deerflow.*) │ │ ├── pyproject.toml @@ -83,16 +81,15 @@ When making code changes, you MUST update the relevant documentation: ```bash make check # Check system requirements make install # Install all dependencies (frontend + backend) -make dev # Start all services (LangGraph + Gateway + Frontend + Nginx), with config.yaml preflight -make dev-pro # Gateway mode (experimental): skip LangGraph, agent runtime embedded in Gateway -make start-pro # Production + Gateway mode (experimental) +make dev # Start all services (Gateway + Frontend + Nginx), with config.yaml preflight +make start # Start production services locally make stop # Stop all services ``` **Backend directory** (for backend development only): ```bash make install # Install backend dependencies -make dev # Run LangGraph server only (port 2024) +make dev # Run Gateway API with reload (port 8001) make gateway # Run Gateway API only (port 8001) make test # Run all backend tests make lint # Lint with ruff @@ -115,7 +112,7 @@ CI runs these regression tests for every pull request via [.github/workflows/bac The backend is split into two layers with a strict dependency direction: - **Harness** (`packages/harness/deerflow/`): Publishable agent framework package (`deerflow-harness`). Import prefix: `deerflow.*`. Contains agent orchestration, tools, sandbox, models, MCP, skills, config — everything needed to build and run agents. -- **App** (`app/`): Unpublished application code. Import prefix: `app.*`. Contains the FastAPI Gateway API and IM channel integrations (Feishu, Slack, Telegram). +- **App** (`app/`): Unpublished application code. Import prefix: `app.*`. Contains the FastAPI Gateway API and IM channel integrations (Feishu, Slack, Telegram, DingTalk). **Dependency rule**: App imports deerflow, but deerflow never imports app. This boundary is enforced by `tests/test_harness_boundary.py` which runs in CI. @@ -158,7 +155,7 @@ from deerflow.config import get_app_config Lead-agent middlewares are assembled in strict append order across `packages/harness/deerflow/agents/middlewares/tool_error_handling_middleware.py` (`build_lead_runtime_middlewares`) and `packages/harness/deerflow/agents/lead_agent/agent.py` (`_build_middlewares`): -1. **ThreadDataMiddleware** - Creates per-thread directories (`backend/.deer-flow/threads/{thread_id}/user-data/{workspace,uploads,outputs}`); Web UI thread deletion now follows LangGraph thread removal with Gateway cleanup of the local `.deer-flow/threads/{thread_id}` directory +1. **ThreadDataMiddleware** - Creates per-thread directories under the user's isolation scope (`backend/.deer-flow/users/{user_id}/threads/{thread_id}/user-data/{workspace,uploads,outputs}`); resolves `user_id` via `get_effective_user_id()` (falls back to `"default"` in no-auth mode); Web UI thread deletion now follows LangGraph thread removal with Gateway cleanup of the local thread directory 2. **UploadsMiddleware** - Tracks and injects newly uploaded files into conversation 3. **SandboxMiddleware** - Acquires sandbox, stores `sandbox_id` in state 4. 
**DanglingToolCallMiddleware** - Injects placeholder ToolMessages for AIMessage tool_calls that lack responses (e.g., due to user interruption), including raw provider tool-call payloads preserved only in `additional_kwargs["tool_calls"]` @@ -208,7 +205,7 @@ Configuration priority: ### Gateway API (`app/gateway/`) -FastAPI application on port 8001 with health check at `GET /health`. +FastAPI application on port 8001 with health check at `GET /health`. Set `GATEWAY_ENABLE_DOCS=false` to disable `/docs`, `/redoc`, and `/openapi.json` in production (default: enabled). **Routers**: @@ -222,6 +219,9 @@ FastAPI application on port 8001 with health check at `GET /health`. | **Threads** (`/api/threads/{id}`) | `DELETE /` - remove DeerFlow-managed local thread data after LangGraph thread deletion; unexpected failures are logged server-side and return a generic 500 detail | | **Artifacts** (`/api/threads/{id}/artifacts`) | `GET /{path}` - serve artifacts; active content types (`text/html`, `application/xhtml+xml`, `image/svg+xml`) are always forced as download attachments to reduce XSS risk; `?download=true` still forces download for other file types | | **Suggestions** (`/api/threads/{id}/suggestions`) | `POST /` - generate follow-up questions; rich list/block model content is normalized before JSON parsing | +| **Thread Runs** (`/api/threads/{id}/runs`) | `POST /` - create background run; `POST /stream` - create + SSE stream; `POST /wait` - create + block; `GET /` - list runs; `GET /{rid}` - run details; `POST /{rid}/cancel` - cancel; `GET /{rid}/join` - join SSE; `GET /{rid}/messages` - paginated messages `{data, has_more}`; `GET /{rid}/events` - full event stream; `GET /../messages` - thread messages with feedback; `GET /../token-usage` - aggregate tokens | +| **Feedback** (`/api/threads/{id}/runs/{rid}/feedback`) | `PUT /` - upsert feedback; `DELETE /` - delete user feedback; `POST /` - create feedback; `GET /` - list feedback; `GET /stats` - aggregate stats; `DELETE /{fid}` - delete specific | +| **Runs** (`/api/runs`) | `POST /stream` - stateless run + SSE; `POST /wait` - stateless run + block; `GET /{rid}/messages` - paginated messages by run_id `{data, has_more}` (cursor: `after_seq`/`before_seq`); `GET /{rid}/feedback` - list feedback by run_id | Proxied through nginx: `/api/langgraph/*` → LangGraph, all other `/api/*` → Gateway. @@ -235,7 +235,7 @@ Proxied through nginx: `/api/langgraph/*` → LangGraph, all other `/api/*` → **Virtual Path System**: - Agent sees: `/mnt/user-data/{workspace,uploads,outputs}`, `/mnt/skills` -- Physical: `backend/.deer-flow/threads/{thread_id}/user-data/...`, `deer-flow/skills/` +- Physical: `backend/.deer-flow/users/{user_id}/threads/{thread_id}/user-data/...`, `deer-flow/skills/` - Translation: `replace_virtual_path()` / `replace_virtual_paths_in_command()` - Detection: `is_local_sandbox()` checks `sandbox_id == "local"` @@ -275,7 +275,7 @@ Proxied through nginx: `/api/langgraph/*` → LangGraph, all other `/api/*` → - `invoke_acp_agent` - Invokes external ACP-compatible agents from `config.yaml` - ACP launchers must be real ACP adapters. The standard `codex` CLI is not ACP-compatible by itself; configure a wrapper such as `npx -y @zed-industries/codex-acp` or an installed `codex-acp` binary - Missing ACP executables now return an actionable error message instead of a raw `[Errno 2]` -- Each ACP agent uses a per-thread workspace at `{base_dir}/threads/{thread_id}/acp-workspace/`. 
The workspace is accessible to the lead agent via the virtual path `/mnt/acp-workspace/` (read-only). In docker sandbox mode, the directory is volume-mounted into the container at `/mnt/acp-workspace` (read-only); in local sandbox mode, path translation is handled by `tools.py` - `image_search/` - Image search via DuckDuckGo ### MCP System (`packages/harness/deerflow/mcp/`) @@ -312,9 +312,10 @@ Proxied through nginx: `/api/langgraph/*` → LangGraph, all other `/api/*` → ### IM Channels System (`app/channels/`) -Bridges external messaging platforms (Feishu, Slack, Telegram) to the DeerFlow agent via the LangGraph Server. +Bridges external messaging platforms (Feishu, Slack, Telegram, DingTalk) to the DeerFlow agent via Gateway's LangGraph-compatible API. -**Architecture**: Channels communicate with the LangGraph Server through `langgraph-sdk` HTTP client (same as the frontend), ensuring threads are created and managed server-side. + +**Architecture**: Channels communicate with Gateway through the `langgraph-sdk` HTTP client (same as the frontend), ensuring threads are created and managed server-side. The internal SDK client injects process-local internal auth plus a matching CSRF cookie/header pair so Gateway accepts state-changing thread/run requests from channel workers without relying on browser session cookies. **Components**: - `message_bus.py` - Async pub/sub hub (`InboundMessage` → queue → dispatcher; `OutboundMessage` → callbacks → channels) - `manager.py` - Core dispatcher: creates threads via `client.threads.create()`, routes commands, keeps Slack/Telegram on `client.runs.wait()`, and uses `client.runs.stream(["messages-tuple", "values"])` for Feishu incremental outbound updates - `base.py` - Abstract `Channel` base class (start/stop/send lifecycle) - `service.py` - Manages lifecycle of all configured channels from `config.yaml` -- `slack.py` / `feishu.py` / `telegram.py` - Platform-specific implementations (`feishu.py` tracks the running card `message_id` in memory and patches the same card in place) +- `slack.py` / `feishu.py` / `telegram.py` / `dingtalk.py` - Platform-specific implementations (`feishu.py` tracks the running card `message_id` in memory and patches the same card in place; `dingtalk.py` optionally uses AI Card streaming for in-place updates when `card_template_id` is configured) **Message Flow**: 1. External platform -> Channel impl -> `MessageBus.publish_inbound()` 2. `ChannelManager._dispatch_loop()` consumes from queue -3. For chat: look up/create thread on LangGraph Server +3. For chat: look up/create thread through Gateway's LangGraph-compatible API 4. Feishu chat: `runs.stream()` → accumulate AI text → publish multiple outbound updates (`is_final=False`) → publish final outbound (`is_final=True`) 5. Slack/Telegram chat: `runs.wait()` → extract final response → publish outbound 6. Feishu channel sends one running reply card up front, then patches the same card for each outbound update (card JSON sets `config.update_multi=true` for Feishu's patch API requirement) -7. 
For commands (`/new`, `/status`, `/models`, `/memory`, `/help`): handle locally or query Gateway API -8. Outbound → channel callbacks → platform reply +7. DingTalk AI Card mode (when `card_template_id` configured): `runs.stream()` → create card with initial text → stream updates via `PUT /v1.0/card/streaming` → finalize on `is_final=True`. Falls back to `sampleMarkdown` if card creation or streaming fails +8. For commands (`/new`, `/status`, `/models`, `/memory`, `/help`): handle locally or query Gateway API +9. Outbound → channel callbacks → platform reply **Configuration** (`config.yaml` -> `channels`): -- `langgraph_url` - LangGraph Server URL (default: `http://localhost:2024`) +- `langgraph_url` - LangGraph-compatible Gateway API base URL (default: `http://localhost:8001/api`) - `gateway_url` - Gateway API URL for auxiliary commands (default: `http://localhost:8001`) -- In Docker Compose, IM channels run inside the `gateway` container, so `localhost` points back to that container. Use `http://langgraph:2024` / `http://gateway:8001`, or set `DEER_FLOW_CHANNELS_LANGGRAPH_URL` / `DEER_FLOW_CHANNELS_GATEWAY_URL`. -- Per-channel configs: `feishu` (app_id, app_secret), `slack` (bot_token, app_token), `telegram` (bot_token) +- In Docker Compose, IM channels run inside the `gateway` container, so `localhost` points back to that container. Use `http://gateway:8001/api` for `langgraph_url` and `http://gateway:8001` for `gateway_url`, or set `DEER_FLOW_CHANNELS_LANGGRAPH_URL` / `DEER_FLOW_CHANNELS_GATEWAY_URL`. +- Per-channel configs: `feishu` (app_id, app_secret), `slack` (bot_token, app_token), `telegram` (bot_token), `dingtalk` (client_id, client_secret, optional `card_template_id` for AI Card streaming) + ### Memory System (`packages/harness/deerflow/agents/memory/`) **Components**: - `updater.py` - LLM-based memory updates with fact extraction, whitespace-normalized fact deduplication (trims leading/trailing whitespace before comparing), and atomic file I/O -- `queue.py` - Debounced update queue (per-thread deduplication, configurable wait time) +- `queue.py` - Debounced update queue (per-thread deduplication, configurable wait time); captures `user_id` at enqueue time so it survives the `threading.Timer` boundary - `prompt.py` - Prompt templates for memory updates +- `storage.py` - File-based storage with per-user isolation; cache keyed by `(user_id, agent_name)` tuple -**Data Structure** (stored in `backend/.deer-flow/memory.json`): +**Per-User Isolation**: +- Memory is stored per-user at `{base_dir}/users/{user_id}/memory.json` +- Per-agent per-user memory at `{base_dir}/users/{user_id}/agents/{agent_name}/memory.json` +- `user_id` is resolved via `get_effective_user_id()` from `deerflow.runtime.user_context` +- In no-auth mode, `user_id` defaults to `"default"` (constant `DEFAULT_USER_ID`) +- Absolute `storage_path` in config opts out of per-user isolation +- **Migration**: Run `PYTHONPATH=. python scripts/migrate_user_isolation.py` to move legacy `memory.json` and `threads/` into per-user layout; supports `--dry-run` + +**Data Structure** (stored in `{base_dir}/users/{user_id}/memory.json`): - **User Context**: `workContext`, `personalContext`, `topOfMind` (1-3 sentence summaries) - **History**: `recentMonths`, `earlierContext`, `longTermBackground` - **Facts**: Discrete facts with `id`, `content`, `category` (preference/knowledge/context/behavior/goal), `confidence` (0-1), `createdAt`, `source` **Workflow**: -1. 
`MemoryMiddleware` filters messages (user inputs + final AI responses) and queues conversation +1. `MemoryMiddleware` filters messages (user inputs + final AI responses), captures `user_id` via `get_effective_user_id()`, and queues conversation with the captured `user_id` 2. Queue debounces (30s default), batches updates, deduplicates per-thread -3. Background thread invokes LLM to extract context updates and facts +3. Background thread invokes LLM to extract context updates and facts, using the stored `user_id` (not the contextvar, which is unavailable on timer threads) 4. Applies updates atomically (temp file + rename) with cache invalidation, skipping duplicate fact content before append 5. Next interaction injects top 15 facts + context into `` tags in system prompt @@ -363,7 +375,7 @@ Focused regression coverage for the updater lives in `backend/tests/test_memory_ **Configuration** (`config.yaml` → `memory`): - `enabled` / `injection_enabled` - Master switches -- `storage_path` - Path to memory.json +- `storage_path` - Path to memory.json (absolute path opts out of per-user isolation) - `debounce_seconds` - Wait time before processing (default: 30) - `model_name` - LLM for updates (null = default model) - `max_facts` / `fact_confidence_threshold` - Fact storage limits (100 / 0.7) @@ -398,9 +410,9 @@ Both can be modified at runtime via Gateway API endpoints or `DeerFlowClient` me `DeerFlowClient` provides direct in-process access to all DeerFlow capabilities without HTTP services. All return types align with the Gateway API response schemas, so consumer code works identically in HTTP and embedded modes. -**Architecture**: Imports the same `deerflow` modules that LangGraph Server and Gateway API use. Shares the same config files and data directories. No FastAPI dependency. +**Architecture**: Imports the same `deerflow` modules that Gateway API uses. Shares the same config files and data directories. No FastAPI dependency. -**Agent Conversation** (replaces LangGraph Server): +**Agent Conversation**: - `chat(message, thread_id)` — synchronous, accumulates streaming deltas per message-id and returns the final AI text - `stream(message, thread_id)` — subscribes to LangGraph `stream_mode=["values", "messages", "custom"]` and yields `StreamEvent`: - `"values"` — full state snapshot (title, messages, artifacts); AI text already delivered via `messages` mode is **not** re-synthesized here to avoid duplicate deliveries @@ -463,20 +475,15 @@ This starts all services and makes the application available at `http://localhos | | **Local Foreground** | **Local Daemon** | **Docker Dev** | **Docker Prod** | |---|---|---|---|---| | **Dev** | `./scripts/serve.sh --dev`
`make dev` | `./scripts/serve.sh --dev --daemon`<br>`make dev-daemon` | `./scripts/docker.sh start`<br>`make docker-start` | — | -| **Dev + Gateway** | `./scripts/serve.sh --dev --gateway`<br>`make dev-pro` | `./scripts/serve.sh --dev --gateway --daemon`<br>`make dev-daemon-pro` | `./scripts/docker.sh start --gateway`<br>`make docker-start-pro` | — | | **Prod** | `./scripts/serve.sh --prod`<br>`make start` | `./scripts/serve.sh --prod --daemon`<br>`make start-daemon` | — | `./scripts/deploy.sh`<br>`make up` | -| **Prod + Gateway** | `./scripts/serve.sh --prod --gateway`<br>`make start-pro` | `./scripts/serve.sh --prod --gateway --daemon`<br>`make start-daemon-pro` | — | `./scripts/deploy.sh --gateway`<br>`make up-pro` | | Action | Local | Docker Dev | Docker Prod | |---|---|---|---| | **Stop** | `./scripts/serve.sh --stop`<br>`make stop` | `./scripts/docker.sh stop`<br>`make docker-stop` | `./scripts/deploy.sh down`<br>`make down` | | **Restart** | `./scripts/serve.sh --restart [flags]` | `./scripts/docker.sh restart` | — | -Gateway mode embeds the agent runtime in Gateway, no LangGraph server. - **Nginx routing**: -- Standard mode: `/api/langgraph/*` → LangGraph Server (2024) -- Gateway mode: `/api/langgraph/*` → Gateway embedded runtime (8001) (via envsubst) +- `/api/langgraph/*` → Gateway embedded runtime (8001), rewritten to `/api/*` - `/api/*` (other) → Gateway API (8001) - `/` (non-API) → Frontend (3000) ### Running Services Individually From the **backend** directory: ```bash -# Terminal 1: LangGraph server -make dev - -# Terminal 2: Gateway API +# Gateway API make gateway ``` Direct access (without nginx): -- LangGraph: `http://localhost:2024` - Gateway: `http://localhost:8001` ### Frontend Configuration diff --git a/backend/Dockerfile index c0f59d2f1..c046268d3 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -13,6 +13,9 @@ FROM python:3.12-slim-bookworm AS builder ARG NODE_MAJOR=22 ARG APT_MIRROR ARG UV_INDEX_URL +# Optional extras to install (e.g. "postgres" for PostgreSQL support) +# Usage: docker build --build-arg UV_EXTRAS=postgres ... +ARG UV_EXTRAS # Optionally override apt mirror for restricted networks (e.g. APT_MIRROR=mirrors.aliyun.com) RUN if [ -n "${APT_MIRROR}" ]; then \ @@ -43,8 +46,9 @@ WORKDIR /app COPY backend ./backend # Install dependencies with cache mount +# When UV_EXTRAS is set (e.g. "postgres"), installs optional dependencies. RUN --mount=type=cache,target=/root/.cache/uv \ - sh -c "cd backend && UV_INDEX_URL=${UV_INDEX_URL:-https://pypi.org/simple} uv sync" + sh -c "cd backend && UV_INDEX_URL=${UV_INDEX_URL:-https://pypi.org/simple} uv sync ${UV_EXTRAS:+--extra $UV_EXTRAS}" # ── Stage 2: Dev ────────────────────────────────────────────────────────────── # Retains compiler toolchain from builder so startup-time `uv sync` can build diff --git a/backend/Makefile index dd06742a0..81a055684 100644 --- a/backend/Makefile +++ b/backend/Makefile @@ -2,7 +2,7 @@ install: uv sync dev: - uv run langgraph dev --no-browser --no-reload --n-jobs-per-worker 10 + PYTHONPATH=. uv run uvicorn app.gateway.app:app --host 0.0.0.0 --port 8001 --reload gateway: PYTHONPATH=. uv run uvicorn app.gateway.app:app --host 0.0.0.0 --port 8001 diff --git a/backend/app/channels/__init__.py index 4a583c074..77b17335e 100644 --- a/backend/app/channels/__init__.py +++ b/backend/app/channels/__init__.py @@ -2,7 +2,7 @@ Provides a pluggable channel system that connects external messaging platforms -(Feishu/Lark, Slack, Telegram) to the DeerFlow agent via the ChannelManager, +(Feishu/Lark, Slack, Telegram, DingTalk) to the DeerFlow agent via the ChannelManager, -which uses ``langgraph-sdk`` to communicate with the underlying LangGraph Server. +which uses ``langgraph-sdk`` to communicate with Gateway's LangGraph-compatible API. 
""" from app.channels.base import Channel diff --git a/backend/app/channels/base.py b/backend/app/channels/base.py index 95aecf267..baf542c48 100644 --- a/backend/app/channels/base.py +++ b/backend/app/channels/base.py @@ -31,6 +31,10 @@ class Channel(ABC): def is_running(self) -> bool: return self._running + @property + def supports_streaming(self) -> bool: + return False + # -- lifecycle --------------------------------------------------------- @abstractmethod diff --git a/backend/app/channels/dingtalk.py b/backend/app/channels/dingtalk.py new file mode 100644 index 000000000..f2833d4ff --- /dev/null +++ b/backend/app/channels/dingtalk.py @@ -0,0 +1,740 @@ +"""DingTalk channel implementation.""" + +from __future__ import annotations + +import asyncio +import json +import logging +import re +import threading +import time +from pathlib import Path +from typing import Any + +import httpx + +from app.channels.base import Channel +from app.channels.commands import KNOWN_CHANNEL_COMMANDS +from app.channels.message_bus import InboundMessage, InboundMessageType, MessageBus, OutboundMessage, ResolvedAttachment + +logger = logging.getLogger(__name__) + +DINGTALK_API_BASE = "https://api.dingtalk.com" + +_TOKEN_REFRESH_MARGIN_SECONDS = 300 + +_CONVERSATION_TYPE_P2P = "1" +_CONVERSATION_TYPE_GROUP = "2" + +_MAX_UPLOAD_SIZE_BYTES = 20 * 1024 * 1024 + + +def _normalize_conversation_type(raw: Any) -> str: + """Normalize ``conversationType`` to ``"1"`` (P2P) or ``"2"`` (group). + + Stream payloads may send int or string values. + """ + if raw is None: + return _CONVERSATION_TYPE_P2P + s = str(raw).strip() + if s == _CONVERSATION_TYPE_GROUP: + return _CONVERSATION_TYPE_GROUP + return _CONVERSATION_TYPE_P2P + + +def _normalize_allowed_users(allowed_users: Any) -> set[str]: + if allowed_users is None: + return set() + if isinstance(allowed_users, str): + values = [allowed_users] + elif isinstance(allowed_users, (list, tuple, set)): + values = allowed_users + else: + logger.warning( + "DingTalk allowed_users should be a list of user IDs; treating %s as one string value", + type(allowed_users).__name__, + ) + values = [allowed_users] + return {str(uid) for uid in values if str(uid)} + + +def _is_dingtalk_command(text: str) -> bool: + if not text.startswith("/"): + return False + return text.split(maxsplit=1)[0].lower() in KNOWN_CHANNEL_COMMANDS + + +def _extract_text_from_rich_text(rich_text_list: list) -> str: + parts: list[str] = [] + for item in rich_text_list: + if isinstance(item, dict) and "text" in item: + parts.append(item["text"]) + return " ".join(parts) + + +_FENCED_CODE_BLOCK_RE = re.compile(r"```(\w*)\n(.*?)```", re.DOTALL) +_INLINE_CODE_RE = re.compile(r"`([^`\n]+)`") +_HORIZONTAL_RULE_RE = re.compile(r"^-{3,}$", re.MULTILINE) +_TABLE_SEPARATOR_RE = re.compile(r"^\|[-:| ]+\|$", re.MULTILINE) + + +def _convert_markdown_table(text: str) -> str: + # DingTalk sampleMarkdown does not render pipe-delimited tables. 
+ lines = text.split("\n") + result: list[str] = [] + i = 0 + while i < len(lines): + line = lines[i] + # Detect table: header row followed by separator row + if i + 1 < len(lines) and line.strip().startswith("|") and _TABLE_SEPARATOR_RE.match(lines[i + 1].strip()): + headers = [h.strip() for h in line.strip().strip("|").split("|")] + i += 2 # skip header + separator + while i < len(lines) and lines[i].strip().startswith("|"): + cells = [c.strip() for c in lines[i].strip().strip("|").split("|")] + for h, c in zip(headers, cells): + result.append(f"> **{h}**: {c}") + result.append("") + i += 1 + else: + result.append(line) + i += 1 + return "\n".join(result) + + +def _adapt_markdown_for_dingtalk(text: str) -> str: + """Adapt markdown for DingTalk's limited sampleMarkdown renderer.""" + + def _code_block_to_quote(match: re.Match) -> str: + lang = match.group(1) + code = match.group(2).rstrip("\n") + prefix = f"> **{lang}**\n" if lang else "" + quoted_lines = "\n".join(f"> {line}" for line in code.split("\n")) + return f"{prefix}{quoted_lines}\n" + + text = _FENCED_CODE_BLOCK_RE.sub(_code_block_to_quote, text) + text = _INLINE_CODE_RE.sub(r"**\1**", text) + text = _convert_markdown_table(text) + text = _HORIZONTAL_RULE_RE.sub("───────────", text) + return text + + +class DingTalkChannel(Channel): + """DingTalk IM channel using Stream Push (WebSocket, no public IP needed).""" + + def __init__(self, bus: MessageBus, config: dict[str, Any]) -> None: + super().__init__(name="dingtalk", bus=bus, config=config) + self._thread: threading.Thread | None = None + self._main_loop: asyncio.AbstractEventLoop | None = None + self._client_id: str = "" + self._client_secret: str = "" + self._allowed_users: set[str] = _normalize_allowed_users(config.get("allowed_users")) + self._cached_token: str = "" + self._token_expires_at: float = 0.0 + self._token_lock = asyncio.Lock() + self._card_template_id: str = config.get("card_template_id", "") + self._card_track_ids: dict[str, str] = {} + self._dingtalk_client: Any = None + self._stream_client: Any = None + self._incoming_messages: dict[str, Any] = {} + self._incoming_messages_lock = threading.Lock() + self._card_repliers: dict[str, Any] = {} + + @property + def supports_streaming(self) -> bool: + return bool(self._card_template_id) + + async def start(self) -> None: + if self._running: + return + + try: + import dingtalk_stream # noqa: F401 + except ImportError: + logger.error("dingtalk-stream is not installed. 
Install it with: uv add dingtalk-stream") + return + + client_id = self.config.get("client_id", "") + client_secret = self.config.get("client_secret", "") + + if not client_id or not client_secret: + logger.error("DingTalk channel requires client_id and client_secret") + return + + self._client_id = client_id + self._client_secret = client_secret + self._main_loop = asyncio.get_running_loop() + + if self._card_template_id: + logger.info("[DingTalk] AI Card mode enabled (template=%s)", self._card_template_id) + + self._running = True + self.bus.subscribe_outbound(self._on_outbound) + + self._thread = threading.Thread( + target=self._run_stream, + args=(client_id, client_secret), + daemon=True, + ) + self._thread.start() + logger.info("DingTalk channel started") + + async def stop(self) -> None: + self._running = False + self.bus.unsubscribe_outbound(self._on_outbound) + + stream_client = self._stream_client + if stream_client is not None: + try: + if hasattr(stream_client, "disconnect"): + stream_client.disconnect() + except Exception: + logger.debug("[DingTalk] error disconnecting stream client", exc_info=True) + + self._dingtalk_client = None + self._stream_client = None + with self._incoming_messages_lock: + self._incoming_messages.clear() + self._card_repliers.clear() + self._card_track_ids.clear() + if self._thread: + self._thread.join(timeout=5) + self._thread = None + logger.info("DingTalk channel stopped") + + def _resolve_routing(self, msg: OutboundMessage) -> tuple[str, str, str]: + """Return (conversation_type, sender_staff_id, conversation_id). + + Uses msg.chat_id as the primary routing key; metadata as fallback. + """ + conversation_type = _normalize_conversation_type(msg.metadata.get("conversation_type")) + sender_staff_id = msg.metadata.get("sender_staff_id", "") + conversation_id = msg.metadata.get("conversation_id", "") + if conversation_type == _CONVERSATION_TYPE_GROUP: + conversation_id = msg.chat_id or conversation_id + else: + sender_staff_id = msg.chat_id or sender_staff_id + return conversation_type, sender_staff_id, conversation_id + + async def send(self, msg: OutboundMessage, *, _max_retries: int = 3) -> None: + conversation_type, sender_staff_id, conversation_id = self._resolve_routing(msg) + robot_code = self._client_id + + # Card mode: stream update to existing AI card + source_key = self._make_card_source_key_from_outbound(msg) + out_track_id = self._card_track_ids.get(source_key) + + # ``card_template_id`` enables ``runs.stream`` (non-final + final outbounds). + # If card creation failed, skip non-final chunks to avoid duplicate messages. 
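+        # A final outbound with no card still falls through to the
+        # sampleMarkdown path below, so the user gets the complete answer once.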
+ if self._card_template_id and not out_track_id and not msg.is_final: + return + + if out_track_id: + try: + await self._stream_update_card( + out_track_id, + msg.text, + is_finalize=msg.is_final, + ) + except Exception: + logger.warning("[DingTalk] card stream failed, falling back to sampleMarkdown") + if msg.is_final: + self._card_track_ids.pop(source_key, None) + self._card_repliers.pop(out_track_id, None) + await self._send_markdown_fallback(robot_code, conversation_type, sender_staff_id, conversation_id, msg.text) + return + if msg.is_final: + self._card_track_ids.pop(source_key, None) + self._card_repliers.pop(out_track_id, None) + return + + # Non-card mode: send sampleMarkdown with retry + last_exc: Exception | None = None + for attempt in range(_max_retries): + try: + if conversation_type == _CONVERSATION_TYPE_GROUP: + await self._send_group_message(robot_code, conversation_id, msg.text, at_user_ids=[sender_staff_id] if sender_staff_id else None) + else: + await self._send_p2p_message(robot_code, sender_staff_id, msg.text) + return + except Exception as exc: + last_exc = exc + if attempt < _max_retries - 1: + delay = 2**attempt + logger.warning( + "[DingTalk] send failed (attempt %d/%d), retrying in %ds: %s", + attempt + 1, + _max_retries, + delay, + exc, + ) + await asyncio.sleep(delay) + + logger.error("[DingTalk] send failed after %d attempts: %s", _max_retries, last_exc) + if last_exc is None: + raise RuntimeError("DingTalk send failed without an exception from any attempt") + raise last_exc + + async def _send_markdown_fallback( + self, + robot_code: str, + conversation_type: str, + sender_staff_id: str, + conversation_id: str, + text: str, + ) -> None: + try: + if conversation_type == _CONVERSATION_TYPE_GROUP: + await self._send_group_message(robot_code, conversation_id, text) + else: + await self._send_p2p_message(robot_code, sender_staff_id, text) + except Exception: + logger.exception("[DingTalk] markdown fallback also failed") + raise + + async def send_file(self, msg: OutboundMessage, attachment: ResolvedAttachment) -> bool: + if attachment.size > _MAX_UPLOAD_SIZE_BYTES: + logger.warning("[DingTalk] file too large (%d bytes), skipping: %s", attachment.size, attachment.filename) + return False + + conversation_type, sender_staff_id, conversation_id = self._resolve_routing(msg) + robot_code = self._client_id + + try: + media_id = await self._upload_media(attachment.actual_path, "image" if attachment.is_image else "file") + if not media_id: + return False + + if attachment.is_image: + msg_key = "sampleImageMsg" + msg_param = json.dumps({"photoURL": media_id}) + else: + msg_key = "sampleFile" + msg_param = json.dumps( + { + "fileUrl": media_id, + "fileName": attachment.filename, + "fileSize": str(attachment.size), + } + ) + + token = await self._get_access_token() + async with httpx.AsyncClient(timeout=httpx.Timeout(30.0)) as client: + if conversation_type == _CONVERSATION_TYPE_GROUP: + response = await client.post( + f"{DINGTALK_API_BASE}/v1.0/robot/groupMessages/send", + headers=self._api_headers(token), + json={ + "msgKey": msg_key, + "msgParam": msg_param, + "robotCode": robot_code, + "openConversationId": conversation_id, + }, + ) + else: + response = await client.post( + f"{DINGTALK_API_BASE}/v1.0/robot/oToMessages/batchSend", + headers=self._api_headers(token), + json={ + "msgKey": msg_key, + "msgParam": msg_param, + "robotCode": robot_code, + "userIds": [sender_staff_id], + }, + ) + response.raise_for_status() + + logger.info("[DingTalk] file sent: %s", 
attachment.filename) + return True + except (httpx.HTTPError, OSError, ValueError, TypeError, AttributeError): + logger.exception("[DingTalk] failed to send file: %s", attachment.filename) + return False + + # -- stream client (runs in dedicated thread) -------------------------- + + def _run_stream(self, client_id: str, client_secret: str) -> None: + try: + import dingtalk_stream + + credential = dingtalk_stream.Credential(client_id, client_secret) + client = dingtalk_stream.DingTalkStreamClient(credential) + self._stream_client = client + client.register_callback_handler( + dingtalk_stream.chatbot.ChatbotMessage.TOPIC, + _DingTalkMessageHandler(self), + ) + client.start_forever() + except Exception: + if self._running: + logger.exception("DingTalk Stream Push error") + finally: + self._stream_client = None + + def _on_chatbot_message(self, message: Any) -> None: + if not self._running: + return + try: + sender_staff_id = message.sender_staff_id or "" + conversation_type = _normalize_conversation_type(message.conversation_type) + conversation_id = message.conversation_id or "" + msg_id = message.message_id or "" + sender_nick = message.sender_nick or "" + + if self._allowed_users and sender_staff_id not in self._allowed_users: + logger.debug("[DingTalk] ignoring message from non-allowed user: %s", sender_staff_id) + return + + text = self._extract_text(message) + if not text: + logger.info("[DingTalk] empty text, ignoring message") + return + + logger.info( + "[DingTalk] parsed message: conv_type=%s, msg_id=%s, sender=%s(%s), text=%r", + conversation_type, + msg_id, + sender_staff_id, + sender_nick, + text[:100], + ) + + if _is_dingtalk_command(text): + msg_type = InboundMessageType.COMMAND + else: + msg_type = InboundMessageType.CHAT + + # P2P: topic_id=None (single thread per user, like Telegram private chat) + # Group: topic_id=msg_id (each new message starts a new topic, like Feishu) + topic_id: str | None = msg_id if conversation_type == _CONVERSATION_TYPE_GROUP else None + + # chat_id uses conversation_id for groups, sender_staff_id for P2P + chat_id = conversation_id if conversation_type == _CONVERSATION_TYPE_GROUP else sender_staff_id + + inbound = self._make_inbound( + chat_id=chat_id, + user_id=sender_staff_id, + text=text, + msg_type=msg_type, + thread_ts=msg_id, + metadata={ + "conversation_type": conversation_type, + "conversation_id": conversation_id, + "sender_staff_id": sender_staff_id, + "sender_nick": sender_nick, + "message_id": msg_id, + }, + ) + inbound.topic_id = topic_id + + if self._card_template_id: + source_key = self._make_card_source_key(inbound) + with self._incoming_messages_lock: + self._incoming_messages[source_key] = message + + if self._main_loop and self._main_loop.is_running(): + logger.info("[DingTalk] publishing inbound message to bus (type=%s, msg_id=%s)", msg_type.value, msg_id) + fut = asyncio.run_coroutine_threadsafe( + self._prepare_inbound(chat_id, inbound), + self._main_loop, + ) + fut.add_done_callback(lambda f, mid=msg_id: self._log_future_error(f, "prepare_inbound", mid)) + else: + logger.warning("[DingTalk] main loop not running, cannot publish inbound message") + except Exception: + logger.exception("[DingTalk] error processing chatbot message") + + @staticmethod + def _extract_text(message: Any) -> str: + msg_type = message.message_type + if msg_type == "text" and message.text: + return message.text.content.strip() + if msg_type == "richText" and message.rich_text_content: + return 
_extract_text_from_rich_text(message.rich_text_content.rich_text_list).strip() + return "" + + async def _prepare_inbound(self, chat_id: str, inbound: InboundMessage) -> None: + # Running reply must finish before publish_inbound so AI card tracks are + # registered before the manager emits streaming outbounds. + await self._send_running_reply(chat_id, inbound) + await self.bus.publish_inbound(inbound) + + async def _send_running_reply(self, chat_id: str, inbound: InboundMessage) -> None: + conversation_type = inbound.metadata.get("conversation_type", _CONVERSATION_TYPE_P2P) + sender_staff_id = inbound.metadata.get("sender_staff_id", "") + conversation_id = inbound.metadata.get("conversation_id", "") + text = "\u23f3 Working on it..." + + try: + if self._card_template_id: + source_key = self._make_card_source_key(inbound) + with self._incoming_messages_lock: + chatbot_message = self._incoming_messages.pop(source_key, None) + out_track_id = await self._create_and_deliver_card( + text, + chatbot_message=chatbot_message, + ) + if out_track_id: + self._card_track_ids[source_key] = out_track_id + logger.info("[DingTalk] AI card running reply sent for chat=%s", chat_id) + return + + robot_code = self._client_id + if conversation_type == _CONVERSATION_TYPE_GROUP: + await self._send_text_message_to_group(robot_code, conversation_id, text) + else: + await self._send_text_message_to_user(robot_code, sender_staff_id, text) + logger.info("[DingTalk] 'Working on it...' reply sent for chat=%s", chat_id) + except Exception: + logger.exception("[DingTalk] failed to send running reply for chat=%s", chat_id) + + # -- DingTalk API helpers ---------------------------------------------- + + async def _get_access_token(self) -> str: + if self._cached_token and time.monotonic() < self._token_expires_at: + return self._cached_token + async with self._token_lock: + if self._cached_token and time.monotonic() < self._token_expires_at: + return self._cached_token + async with httpx.AsyncClient(timeout=httpx.Timeout(10.0)) as client: + response = await client.post( + f"{DINGTALK_API_BASE}/v1.0/oauth2/accessToken", + json={"appKey": self._client_id, "appSecret": self._client_secret}, # DingTalk API field names + ) + response.raise_for_status() + data = response.json() + + if not isinstance(data, dict): + raise ValueError(f"DingTalk access token response must be a JSON object, got {type(data).__name__}") + + access_token = data.get("accessToken") + if not isinstance(access_token, str) or not access_token.strip(): + raise ValueError("DingTalk access token response did not contain a usable accessToken") + + raw_expires_in = data.get("expireIn", 7200) + try: + expires_in = int(raw_expires_in) + except (TypeError, ValueError): + logger.warning("[DingTalk] invalid expireIn value %r, using default 7200s", raw_expires_in) + expires_in = 7200 + + self._cached_token = access_token.strip() + self._token_expires_at = time.monotonic() + expires_in - _TOKEN_REFRESH_MARGIN_SECONDS + return self._cached_token + + @staticmethod + def _api_headers(token: str) -> dict[str, str]: + return { + "x-acs-dingtalk-access-token": token, + "Content-Type": "application/json", + } + + async def _send_text_message_to_user(self, robot_code: str, user_id: str, text: str) -> None: + token = await self._get_access_token() + async with httpx.AsyncClient(timeout=httpx.Timeout(30.0)) as client: + response = await client.post( + f"{DINGTALK_API_BASE}/v1.0/robot/oToMessages/batchSend", + headers=self._api_headers(token), + json={ + "msgKey": "sampleText", + 
"msgParam": json.dumps({"content": text}), + "robotCode": robot_code, + "userIds": [user_id], + }, + ) + response.raise_for_status() + + async def _send_text_message_to_group(self, robot_code: str, conversation_id: str, text: str) -> None: + token = await self._get_access_token() + async with httpx.AsyncClient(timeout=httpx.Timeout(30.0)) as client: + response = await client.post( + f"{DINGTALK_API_BASE}/v1.0/robot/groupMessages/send", + headers=self._api_headers(token), + json={ + "msgKey": "sampleText", + "msgParam": json.dumps({"content": text}), + "robotCode": robot_code, + "openConversationId": conversation_id, + }, + ) + response.raise_for_status() + + async def _send_p2p_message(self, robot_code: str, user_id: str, text: str) -> None: + text = _adapt_markdown_for_dingtalk(text) + token = await self._get_access_token() + async with httpx.AsyncClient(timeout=httpx.Timeout(30.0)) as client: + response = await client.post( + f"{DINGTALK_API_BASE}/v1.0/robot/oToMessages/batchSend", + headers=self._api_headers(token), + json={ + "msgKey": "sampleMarkdown", + "msgParam": json.dumps({"title": "DeerFlow", "text": text}), + "robotCode": robot_code, + "userIds": [user_id], + }, + ) + response.raise_for_status() + data = response.json() + if data.get("processQueryKey"): + logger.info("[DingTalk] P2P message sent to user=%s", user_id) + else: + logger.warning("[DingTalk] P2P send response: %s", data) + + async def _send_group_message( + self, + robot_code: str, + conversation_id: str, + text: str, + *, + at_user_ids: list[str] | None = None, # noqa: ARG002 + ) -> None: + # at_user_ids accepted for call-site compatibility but not passed to the API + # (sampleMarkdown does not support @mentions). + text = _adapt_markdown_for_dingtalk(text) + token = await self._get_access_token() + + async with httpx.AsyncClient(timeout=httpx.Timeout(30.0)) as client: + response = await client.post( + f"{DINGTALK_API_BASE}/v1.0/robot/groupMessages/send", + headers=self._api_headers(token), + json={ + "msgKey": "sampleMarkdown", + "msgParam": json.dumps({"title": "DeerFlow", "text": text}), + "robotCode": robot_code, + "openConversationId": conversation_id, + }, + ) + response.raise_for_status() + data = response.json() + if data.get("processQueryKey"): + logger.info("[DingTalk] group message sent to conversation=%s", conversation_id) + else: + logger.warning("[DingTalk] group send response: %s", data) + + # -- AI Card streaming helpers ------------------------------------------- + + def _make_card_source_key(self, inbound: InboundMessage) -> str: + m = inbound.metadata + return f"{m.get('conversation_type', '')}:{m.get('sender_staff_id', '')}:{m.get('conversation_id', '')}:{m.get('message_id', '')}" + + def _make_card_source_key_from_outbound(self, msg: OutboundMessage) -> str: + m = msg.metadata + correlation_id = m.get("message_id") or msg.thread_ts or "" + return f"{m.get('conversation_type', '')}:{m.get('sender_staff_id', '')}:{m.get('conversation_id', '')}:{correlation_id}" + + async def _create_and_deliver_card( + self, + initial_text: str, + *, + chatbot_message: Any = None, + ) -> str | None: + if self._dingtalk_client is None or chatbot_message is None: + logger.warning("[DingTalk] SDK client or chatbot_message unavailable, skipping AI card") + return None + + try: + from dingtalk_stream.card_replier import AICardReplier + except ImportError: + logger.warning("[DingTalk] dingtalk-stream card_replier not available") + return None + + try: + replier = AICardReplier(self._dingtalk_client, chatbot_message) + 
card_instance_id = await replier.async_create_and_deliver_card( + card_template_id=self._card_template_id, + card_data={"content": initial_text}, + ) + if not card_instance_id: + return None + + self._card_repliers[card_instance_id] = replier + logger.info("[DingTalk] AI card created: outTrackId=%s", card_instance_id) + return card_instance_id + except Exception: + logger.exception("[DingTalk] failed to create AI card") + return None + + async def _stream_update_card( + self, + out_track_id: str, + content: str, + *, + is_finalize: bool = False, + is_error: bool = False, + ) -> None: + replier = self._card_repliers.get(out_track_id) + if not replier: + raise RuntimeError(f"No AICardReplier found for track ID {out_track_id}") + + await replier.async_streaming( + card_instance_id=out_track_id, + content_key="content", + content_value=content, + append=False, + finished=is_finalize, + failed=is_error, + ) + + # -- media upload -------------------------------------------------------- + + async def _upload_media(self, file_path: str | Path, media_type: str) -> str | None: + try: + file_bytes = await asyncio.to_thread(Path(file_path).read_bytes) + token = await self._get_access_token() + async with httpx.AsyncClient(timeout=httpx.Timeout(60.0)) as client: + response = await client.post( + f"{DINGTALK_API_BASE}/v1.0/files/upload", + headers={"x-acs-dingtalk-access-token": token}, + files={"file": ("upload", file_bytes)}, + data={"type": media_type}, + ) + response.raise_for_status() + try: + payload = response.json() + except json.JSONDecodeError: + logger.exception("[DingTalk] failed to decode upload response JSON: %s", file_path) + return None + if not isinstance(payload, dict): + logger.warning("[DingTalk] unexpected upload response type %s for %s", type(payload).__name__, file_path) + return None + return payload.get("mediaId") + except (httpx.HTTPError, OSError): + logger.exception("[DingTalk] failed to upload media: %s", file_path) + return None + + @staticmethod + def _log_future_error(fut: Any, name: str, msg_id: str) -> None: + try: + exc = fut.exception() + if exc: + logger.error("[DingTalk] %s failed for msg_id=%s: %s", name, msg_id, exc) + except (asyncio.CancelledError, asyncio.InvalidStateError): + pass + + +class _DingTalkMessageHandler: + """Callback handler registered with dingtalk-stream.""" + + def __init__(self, channel: DingTalkChannel) -> None: + self._channel = channel + + def pre_start(self) -> None: + if hasattr(self, "dingtalk_client") and self.dingtalk_client is not None: + self._channel._dingtalk_client = self.dingtalk_client + + async def raw_process(self, callback_message: Any) -> Any: + import dingtalk_stream + from dingtalk_stream.frames import Headers + + code, message = await self.process(callback_message) + ack_message = dingtalk_stream.AckMessage() + ack_message.code = code + ack_message.headers.message_id = callback_message.headers.message_id + ack_message.headers.content_type = Headers.CONTENT_TYPE_APPLICATION_JSON + ack_message.data = {"response": message} + return ack_message + + async def process(self, callback: Any) -> tuple[int, str]: + import dingtalk_stream + + incoming_message = dingtalk_stream.ChatbotMessage.from_dict(callback.data) + self._channel._on_chatbot_message(incoming_message) + return dingtalk_stream.AckMessage.STATUS_OK, "OK" diff --git a/backend/app/channels/feishu.py b/backend/app/channels/feishu.py index c2a637ff9..75892d54d 100644 --- a/backend/app/channels/feishu.py +++ b/backend/app/channels/feishu.py @@ -13,6 +13,7 @@ from 
app.channels.base import Channel from app.channels.commands import KNOWN_CHANNEL_COMMANDS from app.channels.message_bus import InboundMessage, InboundMessageType, MessageBus, OutboundMessage, ResolvedAttachment from deerflow.config.paths import VIRTUAL_PATH_PREFIX, get_paths +from deerflow.runtime.user_context import get_effective_user_id from deerflow.sandbox.sandbox_provider import get_sandbox_provider logger = logging.getLogger(__name__) @@ -62,6 +63,10 @@ class FeishuChannel(Channel): self._GetMessageResourceRequest = None self._thread_lock = threading.Lock() + @property + def supports_streaming(self) -> bool: + return True + async def start(self) -> None: if self._running: return @@ -344,8 +349,9 @@ class FeishuChannel(Channel): return f"Failed to obtain the [{type}]" paths = get_paths() - paths.ensure_thread_dirs(thread_id) - uploads_dir = paths.sandbox_uploads_dir(thread_id).resolve() + user_id = get_effective_user_id() + paths.ensure_thread_dirs(thread_id, user_id=user_id) + uploads_dir = paths.sandbox_uploads_dir(thread_id, user_id=user_id).resolve() ext = "png" if type == "image" else "bin" raw_filename = getattr(response, "file_name", "") or f"feishu_{file_key[-12:]}.{ext}" diff --git a/backend/app/channels/manager.py b/backend/app/channels/manager.py index e37078ba1..dc865911f 100644 --- a/backend/app/channels/manager.py +++ b/backend/app/channels/manager.py @@ -1,4 +1,4 @@ -"""ChannelManager — consumes inbound messages and dispatches them to the DeerFlow agent via LangGraph Server.""" +"""ChannelManager — consumes inbound messages and dispatches them to the DeerFlow agent via Gateway.""" from __future__ import annotations @@ -17,10 +17,13 @@ from langgraph_sdk.errors import ConflictError from app.channels.commands import KNOWN_CHANNEL_COMMANDS from app.channels.message_bus import InboundMessage, InboundMessageType, MessageBus, OutboundMessage, ResolvedAttachment from app.channels.store import ChannelStore +from app.gateway.csrf_middleware import CSRF_COOKIE_NAME, CSRF_HEADER_NAME, generate_csrf_token +from app.gateway.internal_auth import create_internal_auth_headers +from deerflow.runtime.user_context import get_effective_user_id logger = logging.getLogger(__name__) -DEFAULT_LANGGRAPH_URL = "http://localhost:2024" +DEFAULT_LANGGRAPH_URL = "http://localhost:8001/api" DEFAULT_GATEWAY_URL = "http://localhost:8001" DEFAULT_ASSISTANT_ID = "lead_agent" CUSTOM_AGENT_NAME_PATTERN = re.compile(r"^[A-Za-z0-9-]+$") @@ -35,6 +38,7 @@ STREAM_UPDATE_MIN_INTERVAL_SECONDS = 0.35 THREAD_BUSY_MESSAGE = "This conversation is already processing another request. Please wait for it to finish and try again." 
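The `STREAM_UPDATE_MIN_INTERVAL_SECONDS = 0.35` constant above feeds the edit throttling in the streaming loop later in this file. A minimal sketch of that kind of rate gate (class and method names are illustrative, not this patch's API):

```python
import time


class StreamThrottle:
    """Suppress partial-message edits that arrive faster than a floor interval."""

    def __init__(self, min_interval: float = 0.35) -> None:
        self._min_interval = min_interval
        self._last_emit = 0.0

    def should_emit(self, *, is_final: bool = False) -> bool:
        # Final chunks always flush so the closing edit is never dropped.
        now = time.monotonic()
        if is_final or now - self._last_emit >= self._min_interval:
            self._last_emit = now
            return True
        return False
```

Edits suppressed by the gate are simply superseded by the next allowed one, which keeps per-channel API usage bounded during long streams.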
CHANNEL_CAPABILITIES = { + "dingtalk": {"supports_streaming": False}, "discord": {"supports_streaming": False}, "feishu": {"supports_streaming": True}, "slack": {"supports_streaming": False}, @@ -45,6 +49,13 @@ CHANNEL_CAPABILITIES = { InboundFileReader = Callable[[dict[str, Any], httpx.AsyncClient], Awaitable[bytes | None]] +_METADATA_DROP_KEYS = frozenset({"raw_message", "ref_msg"}) + + +def _slim_metadata(meta: dict[str, Any]) -> dict[str, Any]: + """Return a shallow copy of *meta* with known-large keys removed.""" + return {k: v for k, v in meta.items() if k not in _METADATA_DROP_KEYS} + INBOUND_FILE_READERS: dict[str, InboundFileReader] = {} @@ -342,14 +353,15 @@ def _resolve_attachments(thread_id: str, artifacts: list[str]) -> list[ResolvedA attachments: list[ResolvedAttachment] = [] paths = get_paths() - outputs_dir = paths.sandbox_outputs_dir(thread_id).resolve() + user_id = get_effective_user_id() + outputs_dir = paths.sandbox_outputs_dir(thread_id, user_id=user_id).resolve() for virtual_path in artifacts: # Security: only allow files from the agent outputs directory if not virtual_path.startswith(_OUTPUTS_VIRTUAL_PREFIX): logger.warning("[Manager] rejected non-outputs artifact path: %s", virtual_path) continue try: - actual = paths.resolve_virtual_path(thread_id, virtual_path) + actual = paths.resolve_virtual_path(thread_id, virtual_path, user_id=user_id) # Verify the resolved path is actually under the outputs directory # (guards against path-traversal even after prefix check) try: @@ -507,7 +519,7 @@ class ChannelManager: """Core dispatcher that bridges IM channels to the DeerFlow agent. It reads from the MessageBus inbound queue, creates/reuses threads on - the LangGraph Server, sends messages via ``runs.wait``, and publishes + Gateway's LangGraph-compatible API, sends messages via ``runs.wait``, and publishes outbound responses back through the bus. 
""" @@ -532,12 +544,20 @@ class ChannelManager: self._default_session = _as_dict(default_session) self._channel_sessions = dict(channel_sessions or {}) self._client = None # lazy init — langgraph_sdk async client + self._csrf_token = generate_csrf_token() self._semaphore: asyncio.Semaphore | None = None self._running = False self._task: asyncio.Task | None = None @staticmethod def _channel_supports_streaming(channel_name: str) -> bool: + from .service import get_channel_service + + service = get_channel_service() + if service: + channel = service.get_channel(channel_name) + if channel is not None: + return channel.supports_streaming return CHANNEL_CAPABILITIES.get(channel_name, {}).get("supports_streaming", False) def _resolve_session_layer(self, msg: InboundMessage) -> tuple[dict[str, Any], dict[str, Any]]: @@ -593,7 +613,14 @@ class ChannelManager: if self._client is None: from langgraph_sdk import get_client - self._client = get_client(url=self._langgraph_url) + self._client = get_client( + url=self._langgraph_url, + headers={ + **create_internal_auth_headers(), + CSRF_HEADER_NAME: self._csrf_token, + "Cookie": f"{CSRF_COOKIE_NAME}={self._csrf_token}", + }, + ) return self._client # -- lifecycle --------------------------------------------------------- @@ -676,7 +703,7 @@ class ChannelManager: # -- chat handling ----------------------------------------------------- async def _create_thread(self, client, msg: InboundMessage) -> str: - """Create a new thread on the LangGraph Server and store the mapping.""" + """Create a new thread through Gateway and store the mapping.""" thread = await client.threads.create() thread_id = thread["thread_id"] self.store.set_thread_id( @@ -686,7 +713,7 @@ class ChannelManager: topic_id=msg.topic_id, user_id=msg.user_id, ) - logger.info("[Manager] new thread created on LangGraph Server: thread_id=%s for chat_id=%s topic_id=%s", thread_id, msg.chat_id, msg.topic_id) + logger.info("[Manager] new thread created through Gateway: thread_id=%s for chat_id=%s topic_id=%s", thread_id, msg.chat_id, msg.topic_id) return thread_id async def _handle_chat(self, msg: InboundMessage, extra_context: dict[str, Any] | None = None) -> None: @@ -769,6 +796,7 @@ class ChannelManager: artifacts=artifacts, attachments=attachments, thread_ts=msg.thread_ts, + metadata=_slim_metadata(msg.metadata), ) logger.info("[Manager] publishing outbound message to bus: channel=%s, chat_id=%s", msg.channel_name, msg.chat_id) await self.bus.publish_outbound(outbound) @@ -830,6 +858,7 @@ class ChannelManager: text=latest_text, is_final=False, thread_ts=msg.thread_ts, + metadata=_slim_metadata(msg.metadata), ) ) last_published_text = latest_text @@ -874,6 +903,7 @@ class ChannelManager: attachments=attachments, is_final=True, thread_ts=msg.thread_ts, + metadata=_slim_metadata(msg.metadata), ) ) @@ -893,7 +923,7 @@ class ChannelManager: return if command == "new": - # Create a new thread on the LangGraph Server + # Create a new thread through Gateway client = self._get_client() thread = await client.threads.create() new_thread_id = thread["thread_id"] @@ -932,6 +962,7 @@ class ChannelManager: thread_id=self.store.get_thread_id(msg.channel_name, msg.chat_id) or "", text=reply, thread_ts=msg.thread_ts, + metadata=_slim_metadata(msg.metadata), ) await self.bus.publish_outbound(outbound) @@ -965,5 +996,6 @@ class ChannelManager: thread_id=self.store.get_thread_id(msg.channel_name, msg.chat_id) or "", text=error_text, thread_ts=msg.thread_ts, + metadata=_slim_metadata(msg.metadata), ) await 
self.bus.publish_outbound(outbound) diff --git a/backend/app/channels/service.py b/backend/app/channels/service.py index 8d17f7481..4a3df9060 100644 --- a/backend/app/channels/service.py +++ b/backend/app/channels/service.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging import os -from typing import Any +from typing import TYPE_CHECKING, Any from app.channels.base import Channel from app.channels.manager import DEFAULT_GATEWAY_URL, DEFAULT_LANGGRAPH_URL, ChannelManager @@ -13,8 +13,12 @@ from app.channels.store import ChannelStore logger = logging.getLogger(__name__) +if TYPE_CHECKING: + from deerflow.config.app_config import AppConfig + # Channel name → import path for lazy loading _CHANNEL_REGISTRY: dict[str, str] = { + "dingtalk": "app.channels.dingtalk:DingTalkChannel", "discord": "app.channels.discord:DiscordChannel", "feishu": "app.channels.feishu:FeishuChannel", "slack": "app.channels.slack:SlackChannel", @@ -23,6 +27,17 @@ _CHANNEL_REGISTRY: dict[str, str] = { "wecom": "app.channels.wecom:WeComChannel", } +# Keys that indicate a user has configured credentials for a channel. +_CHANNEL_CREDENTIAL_KEYS: dict[str, list[str]] = { + "dingtalk": ["client_id", "client_secret"], + "discord": ["bot_token"], + "feishu": ["app_id", "app_secret"], + "slack": ["bot_token", "app_token"], + "telegram": ["bot_token"], + "wecom": ["bot_id", "bot_secret"], + "wechat": ["bot_token"], +} + _CHANNELS_LANGGRAPH_URL_ENV = "DEER_FLOW_CHANNELS_LANGGRAPH_URL" _CHANNELS_GATEWAY_URL_ENV = "DEER_FLOW_CHANNELS_GATEWAY_URL" @@ -65,14 +80,15 @@ class ChannelService: self._running = False @classmethod - def from_app_config(cls) -> ChannelService: + def from_app_config(cls, app_config: AppConfig | None = None) -> ChannelService: """Create a ChannelService from the application config.""" - from deerflow.config.app_config import get_app_config + if app_config is None: + from deerflow.config.app_config import get_app_config - config = get_app_config() + app_config = get_app_config() channels_config = {} # extra fields are allowed by AppConfig (extra="allow") - extra = config.model_extra or {} + extra = app_config.model_extra or {} if "channels" in extra: channels_config = extra["channels"] return cls(channels_config=channels_config) @@ -88,7 +104,16 @@ class ChannelService: if not isinstance(channel_config, dict): continue if not channel_config.get("enabled", False): - logger.info("Channel %s is disabled, skipping", name) + cred_keys = _CHANNEL_CREDENTIAL_KEYS.get(name, []) + has_creds = any(not isinstance(channel_config.get(k), bool) and channel_config.get(k) is not None and str(channel_config[k]).strip() for k in cred_keys) + if has_creds: + logger.warning( + "Channel '%s' has credentials configured but is disabled. 
Set enabled: true under channels.%s in config.yaml to activate it.", + name, + name, + ) + else: + logger.info("Channel %s is disabled, skipping", name) continue await self._start_channel(name, channel_config) @@ -143,11 +168,16 @@ class ChannelService: try: channel = channel_cls(bus=self.bus, config=config) - await channel.start() self._channels[name] = channel + await channel.start() + if not channel.is_running: + self._channels.pop(name, None) + logger.error("Channel %s did not enter a running state after start()", name) + return False logger.info("Channel %s started", name) return True except Exception: + self._channels.pop(name, None) logger.exception("Failed to start channel %s", name) return False @@ -182,12 +212,12 @@ def get_channel_service() -> ChannelService | None: return _channel_service -async def start_channel_service() -> ChannelService: +async def start_channel_service(app_config: AppConfig | None = None) -> ChannelService: """Create and start the global ChannelService from app config.""" global _channel_service if _channel_service is not None: return _channel_service - _channel_service = ChannelService.from_app_config() + _channel_service = ChannelService.from_app_config(app_config) await _channel_service.start() return _channel_service diff --git a/backend/app/channels/slack.py b/backend/app/channels/slack.py index c9ad6a6ec..65cb36cf5 100644 --- a/backend/app/channels/slack.py +++ b/backend/app/channels/slack.py @@ -16,13 +16,31 @@ logger = logging.getLogger(__name__) _slack_md_converter = SlackMarkdownConverter() +def _normalize_allowed_users(allowed_users: Any) -> set[str]: + if allowed_users is None: + return set() + if isinstance(allowed_users, str): + values = [allowed_users] + elif isinstance(allowed_users, list | tuple | set): + values = allowed_users + else: + logger.warning( + "Slack allowed_users should be a list of Slack user IDs or a single Slack user ID string; treating %s as one string value", + type(allowed_users).__name__, + ) + values = [allowed_users] + return {str(user_id) for user_id in values if str(user_id)} + + class SlackChannel(Channel): """Slack IM channel using Socket Mode (WebSocket, no public IP). Configuration keys (in ``config.yaml`` under ``channels.slack``): - ``bot_token``: Slack Bot User OAuth Token (xoxb-...). - ``app_token``: Slack App-Level Token (xapp-...) for Socket Mode. - - ``allowed_users``: (optional) List of allowed Slack user IDs. Empty = allow all. + - ``allowed_users``: (optional) List of allowed Slack user IDs, or a + single Slack user ID string as shorthand. Empty = allow all. Other + scalar values are treated as a single string with a warning. """ def __init__(self, bus: MessageBus, config: dict[str, Any]) -> None: @@ -30,7 +48,7 @@ class SlackChannel(Channel): self._socket_client = None self._web_client = None self._loop: asyncio.AbstractEventLoop | None = None - self._allowed_users: set[str] = {str(user_id) for user_id in config.get("allowed_users", [])} + self._allowed_users = _normalize_allowed_users(config.get("allowed_users", [])) async def start(self) -> None: if self._running: diff --git a/backend/app/channels/wecom.py b/backend/app/channels/wecom.py index 5a8948bd4..3e0cdb3d1 100644 --- a/backend/app/channels/wecom.py +++ b/backend/app/channels/wecom.py @@ -29,6 +29,10 @@ class WeComChannel(Channel): self._ws_stream_ids: dict[str, str] = {} self._working_message = "Working on it..." 
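The hunk below gives WeComChannel the same `supports_streaming` override that feishu.py received earlier in this patch. A condensed sketch of the lookup order the manager now uses, preferring the live channel object and falling back to the static capability map (the helper shape is illustrative; the real logic lives in `ChannelManager._channel_supports_streaming`):

```python
CHANNEL_CAPABILITIES = {"wecom": {"supports_streaming": True}}


def channel_supports_streaming(name: str, live_channel: object | None) -> bool:
    # Prefer the running channel's own property; fall back to the
    # static map when the channel is not instantiated.
    prop = getattr(live_channel, "supports_streaming", None)
    if prop is not None:
        return bool(prop)
    return CHANNEL_CAPABILITIES.get(name, {}).get("supports_streaming", False)
```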
+ @property + def supports_streaming(self) -> bool: + return True + def _clear_ws_context(self, thread_ts: str | None) -> None: if not thread_ts: return diff --git a/backend/app/gateway/app.py b/backend/app/gateway/app.py index 39d17498f..2a506df2b 100644 --- a/backend/app/gateway/app.py +++ b/backend/app/gateway/app.py @@ -1,16 +1,23 @@ +import asyncio import logging +import os from collections.abc import AsyncGenerator from contextlib import asynccontextmanager from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from app.gateway.auth_middleware import AuthMiddleware from app.gateway.config import get_gateway_config +from app.gateway.csrf_middleware import CSRFMiddleware from app.gateway.deps import langgraph_runtime from app.gateway.routers import ( agents, artifacts, assistants_compat, + auth, channels, + feedback, mcp, memory, models, @@ -21,9 +28,13 @@ from app.gateway.routers import ( threads, uploads, ) -from deerflow.config.app_config import get_app_config +from deerflow.config import app_config as deerflow_app_config +from deerflow.config.app_config import apply_logging_level -# Configure logging +AppConfig = deerflow_app_config.AppConfig +get_app_config = deerflow_app_config.get_app_config + +# Default logging; lifespan overrides from config.yaml log_level. logging.basicConfig( level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", @@ -32,6 +43,120 @@ logging.basicConfig( logger = logging.getLogger(__name__) +# Upper bound (seconds) each lifespan shutdown hook is allowed to run. +# Bounds worker exit time so uvicorn's reload supervisor does not keep +# firing signals into a worker that is stuck waiting for shutdown cleanup. +_SHUTDOWN_HOOK_TIMEOUT_SECONDS = 5.0 + + +async def _ensure_admin_user(app: FastAPI) -> None: + """Startup hook: handle first boot and migrate orphan threads otherwise. + + After admin creation, migrate orphan threads from the LangGraph + store (metadata.user_id unset) to the admin account. This is the + "no-auth → with-auth" upgrade path: users who ran DeerFlow without + authentication have existing LangGraph thread data that needs an + owner assigned. + First boot (no admin exists): + - Does NOT create any user accounts automatically. + - The operator must visit ``/setup`` to create the first admin. + + Subsequent boots (admin already exists): + - Runs the one-time "no-auth → with-auth" orphan thread migration for + existing LangGraph thread metadata that has no owner_id. + + No SQL persistence migration is needed: the four user_id columns + (threads_meta, runs, run_events, feedback) only come into existence + alongside the auth module via create_all, so freshly created tables + never contain NULL-owner rows. + """ + from sqlalchemy import select + + from app.gateway.deps import get_local_provider + from deerflow.persistence.engine import get_session_factory + from deerflow.persistence.user.model import UserRow + + try: + provider = get_local_provider() + except RuntimeError: + # Auth persistence may not be initialized in some test/boot paths. + # Skip admin migration work rather than failing gateway startup. 
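(The guard resumes below with a warning and an early return.) Further down this same hunk, `_iter_store_items` replaces a hardcoded `limit=1000` search with cursor pagination. Its general loop shape as a self-contained sketch, where `fetch(limit, offset)` is a stand-in for `store.asearch(namespace, ...)` and not the store's real signature:

```python
from collections.abc import AsyncIterator, Awaitable, Callable
from typing import Any

Fetch = Callable[[int, int], Awaitable[list[Any]]]


async def paginate(fetch: Fetch, page_size: int = 500) -> AsyncIterator[Any]:
    offset = 0
    while True:
        batch = await fetch(page_size, offset)
        if not batch:  # empty page: nothing left
            return
        for item in batch:
            yield item
        if len(batch) < page_size:  # short page: this was the last one
            return
        offset += page_size
```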
+ logger.warning("Auth persistence not ready; skipping admin bootstrap check") + return + + sf = get_session_factory() + if sf is None: + return + + admin_count = await provider.count_admin_users() + + if admin_count == 0: + logger.info("=" * 60) + logger.info(" First boot detected — no admin account exists.") + logger.info(" Visit /setup to complete admin account creation.") + logger.info("=" * 60) + return + + # Admin already exists — run orphan thread migration for any + # LangGraph thread metadata that pre-dates the auth module. + async with sf() as session: + stmt = select(UserRow).where(UserRow.system_role == "admin").limit(1) + row = (await session.execute(stmt)).scalar_one_or_none() + + if row is None: + return # Should not happen (admin_count > 0 above), but be safe. + + admin_id = str(row.id) + + # LangGraph store orphan migration — non-fatal. + # This covers the "no-auth → with-auth" upgrade path for users + # whose existing LangGraph thread metadata has no user_id set. + store = getattr(app.state, "store", None) + if store is not None: + try: + migrated = await _migrate_orphaned_threads(store, admin_id) + if migrated: + logger.info("Migrated %d orphan LangGraph thread(s) to admin", migrated) + except Exception: + logger.exception("LangGraph thread migration failed (non-fatal)") + + +async def _iter_store_items(store, namespace, *, page_size: int = 500): + """Paginated async iterator over a LangGraph store namespace. + + Replaces the old hardcoded ``limit=1000`` call with a cursor-style + loop so that environments with more than one page of orphans do + not silently lose data. Terminates when a page is empty OR when a + short page arrives (indicating the last page). + """ + offset = 0 + while True: + batch = await store.asearch(namespace, limit=page_size, offset=offset) + if not batch: + return + for item in batch: + yield item + if len(batch) < page_size: + return + offset += page_size + + +async def _migrate_orphaned_threads(store, admin_user_id: str) -> int: + """Migrate LangGraph store threads with no user_id to the given admin. + + Uses cursor pagination so all orphans are migrated regardless of + count. Returns the number of rows migrated. 
+ """ + migrated = 0 + async for item in _iter_store_items(store, ("threads",)): + metadata = item.value.get("metadata", {}) + if not metadata.get("user_id"): + metadata["user_id"] = admin_user_id + item.value["metadata"] = metadata + await store.aput(("threads",), item.key, item.value) + migrated += 1 + return migrated + @asynccontextmanager async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: @@ -39,7 +164,8 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: # Load config and check necessary environment variables at startup try: - get_app_config() + app.state.config = get_app_config() + apply_logging_level(app.state.config.log_level) logger.info("Configuration loaded successfully") except Exception as e: error_msg = f"Failed to load configuration during gateway startup: {e}" @@ -52,22 +178,34 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: async with langgraph_runtime(app): logger.info("LangGraph runtime initialised") + # Ensure admin user exists (auto-create on first boot) + # Must run AFTER langgraph_runtime so app.state.store is available for thread migration + await _ensure_admin_user(app) + # Start IM channel service if any channels are configured try: from app.channels.service import start_channel_service - channel_service = await start_channel_service() + channel_service = await start_channel_service(app.state.config) logger.info("Channel service started: %s", channel_service.get_status()) except Exception: logger.exception("No IM channels configured or channel service failed to start") yield - # Stop channel service on shutdown + # Stop channel service on shutdown (bounded to prevent worker hang) try: from app.channels.service import stop_channel_service - await stop_channel_service() + await asyncio.wait_for( + stop_channel_service(), + timeout=_SHUTDOWN_HOOK_TIMEOUT_SECONDS, + ) + except TimeoutError: + logger.warning( + "Channel service shutdown exceeded %.1fs; proceeding with worker exit.", + _SHUTDOWN_HOOK_TIMEOUT_SECONDS, + ) except Exception: logger.exception("Failed to stop channel service") @@ -80,6 +218,8 @@ def create_app() -> FastAPI: Returns: Configured FastAPI application instance. """ + config = get_gateway_config() + docs_kwargs = {"docs_url": "/docs", "redoc_url": "/redoc", "openapi_url": "/openapi.json"} if config.enable_docs else {"docs_url": None, "redoc_url": None, "openapi_url": None} app = FastAPI( title="DeerFlow API Gateway", @@ -104,9 +244,7 @@ This gateway provides custom endpoints for models, MCP configuration, skills, an """, version="0.1.0", lifespan=lifespan, - docs_url="/docs", - redoc_url="/redoc", - openapi_url="/openapi.json", + **docs_kwargs, openapi_tags=[ { "name": "models", @@ -163,7 +301,31 @@ This gateway provides custom endpoints for models, MCP configuration, skills, an ], ) - # CORS is handled by nginx - no need for FastAPI middleware + # Auth: reject unauthenticated requests to non-public paths (fail-closed safety net) + app.add_middleware(AuthMiddleware) + + # CSRF: Double Submit Cookie pattern for state-changing requests + app.add_middleware(CSRFMiddleware) + + # CORS: when GATEWAY_CORS_ORIGINS is set (dev without nginx), add CORS middleware. + # In production, nginx handles CORS and no middleware is needed. 
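The invariant behind the wildcard check in the code that follows: the Fetch standard forbids `Access-Control-Allow-Origin: *` on credentialed responses, so `allow_credentials=True` plus a wildcard origin can never work in a browser. A standalone sketch of the same parse-and-filter step (the function name is illustrative):

```python
def parse_cors_origins(raw: str) -> list[str]:
    """Split a comma-separated origin list, dropping the '*' wildcard."""
    origins = [o.strip() for o in raw.split(",") if o.strip()]
    return [o for o in origins if o != "*"]


assert parse_cors_origins("http://localhost:3000, *") == ["http://localhost:3000"]
assert parse_cors_origins("") == []
```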
+ cors_origins_env = os.environ.get("GATEWAY_CORS_ORIGINS", "") + if cors_origins_env: + cors_origins = [o.strip() for o in cors_origins_env.split(",") if o.strip()] + # Validate: wildcard origin with credentials is a security misconfiguration + for origin in cors_origins: + if origin == "*": + logger.error("GATEWAY_CORS_ORIGINS contains wildcard '*' with allow_credentials=True. This is a security misconfiguration — browsers will reject the response. Use explicit scheme://host:port origins instead.") + cors_origins = [o for o in cors_origins if o != "*"] + break + if cors_origins: + app.add_middleware( + CORSMiddleware, + allow_origins=cors_origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) # Include routers # Models API is mounted at /api/models @@ -199,6 +361,12 @@ This gateway provides custom endpoints for models, MCP configuration, skills, an # Assistants compatibility API (LangGraph Platform stub) app.include_router(assistants_compat.router) + # Auth API is mounted at /api/v1/auth + app.include_router(auth.router) + + # Feedback API is mounted at /api/threads/{thread_id}/runs/{run_id}/feedback + app.include_router(feedback.router) + # Thread Runs API (LangGraph Platform-compatible runs lifecycle) app.include_router(thread_runs.router) diff --git a/backend/app/gateway/auth/__init__.py b/backend/app/gateway/auth/__init__.py new file mode 100644 index 000000000..4e9b71c42 --- /dev/null +++ b/backend/app/gateway/auth/__init__.py @@ -0,0 +1,42 @@ +"""Authentication module for DeerFlow. + +This module provides: +- JWT-based authentication +- Provider Factory pattern for extensible auth methods +- UserRepository interface for storage backends (SQLite) +""" + +from app.gateway.auth.config import AuthConfig, get_auth_config, set_auth_config +from app.gateway.auth.errors import AuthErrorCode, AuthErrorResponse, TokenError +from app.gateway.auth.jwt import TokenPayload, create_access_token, decode_token +from app.gateway.auth.local_provider import LocalAuthProvider +from app.gateway.auth.models import User, UserResponse +from app.gateway.auth.password import hash_password, verify_password +from app.gateway.auth.providers import AuthProvider +from app.gateway.auth.repositories.base import UserRepository + +__all__ = [ + # Config + "AuthConfig", + "get_auth_config", + "set_auth_config", + # Errors + "AuthErrorCode", + "AuthErrorResponse", + "TokenError", + # JWT + "TokenPayload", + "create_access_token", + "decode_token", + # Password + "hash_password", + "verify_password", + # Models + "User", + "UserResponse", + # Providers + "AuthProvider", + "LocalAuthProvider", + # Repository + "UserRepository", +] diff --git a/backend/app/gateway/auth/config.py b/backend/app/gateway/auth/config.py new file mode 100644 index 000000000..4734f0897 --- /dev/null +++ b/backend/app/gateway/auth/config.py @@ -0,0 +1,57 @@ +"""Authentication configuration for DeerFlow.""" + +import logging +import os +import secrets + +from pydantic import BaseModel, Field + +logger = logging.getLogger(__name__) + + +class AuthConfig(BaseModel): + """JWT and auth-related configuration. Parsed once at startup. + + Note: the ``users`` table now lives in the shared persistence + database managed by ``deerflow.persistence.engine``. The old + ``users_db_path`` config key has been removed — user storage is + configured through ``config.database`` like every other table. + """ + + jwt_secret: str = Field( + ..., + description="Secret key for JWT signing. 
MUST be set via AUTH_JWT_SECRET.", + ) + token_expiry_days: int = Field(default=7, ge=1, le=30) + oauth_github_client_id: str | None = Field(default=None) + oauth_github_client_secret: str | None = Field(default=None) + + +_auth_config: AuthConfig | None = None + + +def get_auth_config() -> AuthConfig: + """Get the global AuthConfig instance. Parses from env on first call.""" + global _auth_config + if _auth_config is None: + from dotenv import load_dotenv + + load_dotenv() + jwt_secret = os.environ.get("AUTH_JWT_SECRET") + if not jwt_secret: + jwt_secret = secrets.token_urlsafe(32) + os.environ["AUTH_JWT_SECRET"] = jwt_secret + logger.warning( + "⚠ AUTH_JWT_SECRET is not set — using an auto-generated ephemeral secret. " + "Sessions will be invalidated on restart. " + "For production, add AUTH_JWT_SECRET to your .env file: " + 'python -c "import secrets; print(secrets.token_urlsafe(32))"' + ) + _auth_config = AuthConfig(jwt_secret=jwt_secret) + return _auth_config + + +def set_auth_config(config: AuthConfig) -> None: + """Set the global AuthConfig instance (for testing).""" + global _auth_config + _auth_config = config diff --git a/backend/app/gateway/auth/credential_file.py b/backend/app/gateway/auth/credential_file.py new file mode 100644 index 000000000..100ca3b04 --- /dev/null +++ b/backend/app/gateway/auth/credential_file.py @@ -0,0 +1,48 @@ +"""Write initial admin credentials to a restricted file instead of logs. + +Logging secrets to stdout/stderr is a well-known CodeQL finding +(py/clear-text-logging-sensitive-data) — in production those logs +get collected into ELK/Splunk/etc and become a secret sprawl +source. This helper writes the credential to a 0600 file that only +the process user can read, and returns the path so the caller can +log **the path** (not the password) for the operator to pick up. +""" + +from __future__ import annotations + +import os +from pathlib import Path + +from deerflow.config.paths import get_paths + +_CREDENTIAL_FILENAME = "admin_initial_credentials.txt" + + +def write_initial_credentials(email: str, password: str, *, label: str = "initial") -> Path: + """Write the admin email + password to ``{base_dir}/admin_initial_credentials.txt``. + + The file is created **atomically** with mode 0600 via ``os.open`` + so the password is never world-readable, even for the single syscall + window between ``write_text`` and ``chmod``. + + ``label`` distinguishes "initial" (fresh creation) from "reset" + (password reset) in the file header so an operator picking up the + file after a restart can tell which event produced it. + + Returns the absolute :class:`Path` to the file. + """ + target = get_paths().base_dir / _CREDENTIAL_FILENAME + target.parent.mkdir(parents=True, exist_ok=True) + + content = ( + f"# DeerFlow admin {label} credentials\n# This file is generated on first boot or password reset.\n# Change the password after login via Settings -> Account,\n# then delete this file.\n#\nemail: {email}\npassword: {password}\n" + ) + + # Atomic 0600 create-or-truncate. O_TRUNC (not O_EXCL) so the + # reset-password path can rewrite an existing file without a + # separate unlink-then-create dance. 
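A test-style sketch of the guarantee that comment describes, before the hunk resumes with the real `os.open` call (assumptions: POSIX semantics and a default umask that leaves the owner read/write bits intact; the filename is hypothetical):

```python
import os
import stat
import tempfile


def create_restricted(path: str, data: str) -> None:
    # Same pattern as below: the 0600 mode is applied at create time,
    # so the file is never observable with wider permissions.
    fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
    with os.fdopen(fd, "w", encoding="utf-8") as fh:
        fh.write(data)


with tempfile.TemporaryDirectory() as d:
    p = os.path.join(d, "creds.txt")
    create_restricted(p, "email: a@example.com\npassword: s3cret\n")
    assert stat.S_IMODE(os.stat(p).st_mode) == 0o600
```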
+ fd = os.open(target, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600) + with os.fdopen(fd, "w", encoding="utf-8") as fh: + fh.write(content) + + return target.resolve() diff --git a/backend/app/gateway/auth/errors.py b/backend/app/gateway/auth/errors.py new file mode 100644 index 000000000..b5899ebd8 --- /dev/null +++ b/backend/app/gateway/auth/errors.py @@ -0,0 +1,45 @@ +"""Typed error definitions for auth module. + +AuthErrorCode: exhaustive enum of all auth failure conditions. +TokenError: exhaustive enum of JWT decode failures. +AuthErrorResponse: structured error payload for HTTP responses. +""" + +from enum import StrEnum + +from pydantic import BaseModel + + +class AuthErrorCode(StrEnum): + """Exhaustive list of auth error conditions.""" + + INVALID_CREDENTIALS = "invalid_credentials" + TOKEN_EXPIRED = "token_expired" + TOKEN_INVALID = "token_invalid" + USER_NOT_FOUND = "user_not_found" + EMAIL_ALREADY_EXISTS = "email_already_exists" + PROVIDER_NOT_FOUND = "provider_not_found" + NOT_AUTHENTICATED = "not_authenticated" + SYSTEM_ALREADY_INITIALIZED = "system_already_initialized" + + +class TokenError(StrEnum): + """Exhaustive list of JWT decode failure reasons.""" + + EXPIRED = "expired" + INVALID_SIGNATURE = "invalid_signature" + MALFORMED = "malformed" + + +class AuthErrorResponse(BaseModel): + """Structured error response — replaces bare `detail` strings.""" + + code: AuthErrorCode + message: str + + +def token_error_to_code(err: TokenError) -> AuthErrorCode: + """Map TokenError to AuthErrorCode — single source of truth.""" + if err == TokenError.EXPIRED: + return AuthErrorCode.TOKEN_EXPIRED + return AuthErrorCode.TOKEN_INVALID diff --git a/backend/app/gateway/auth/jwt.py b/backend/app/gateway/auth/jwt.py new file mode 100644 index 000000000..3853692b7 --- /dev/null +++ b/backend/app/gateway/auth/jwt.py @@ -0,0 +1,55 @@ +"""JWT token creation and verification.""" + +from datetime import UTC, datetime, timedelta + +import jwt +from pydantic import BaseModel + +from app.gateway.auth.config import get_auth_config +from app.gateway.auth.errors import TokenError + + +class TokenPayload(BaseModel): + """JWT token payload.""" + + sub: str # user_id + exp: datetime + iat: datetime | None = None + ver: int = 0 # token_version — must match User.token_version + + +def create_access_token(user_id: str, expires_delta: timedelta | None = None, token_version: int = 0) -> str: + """Create a JWT access token. + + Args: + user_id: The user's UUID as string + expires_delta: Optional custom expiry, defaults to 7 days + token_version: User's current token_version for invalidation + + Returns: + Encoded JWT string + """ + config = get_auth_config() + expiry = expires_delta or timedelta(days=config.token_expiry_days) + + now = datetime.now(UTC) + payload = {"sub": user_id, "exp": now + expiry, "iat": now, "ver": token_version} + return jwt.encode(payload, config.jwt_secret, algorithm="HS256") + + +def decode_token(token: str) -> TokenPayload | TokenError: + """Decode and validate a JWT token. + + Returns: + TokenPayload if valid, or a specific TokenError variant. 
+ """ + config = get_auth_config() + try: + payload = jwt.decode(token, config.jwt_secret, algorithms=["HS256"]) + return TokenPayload(**payload) + except jwt.ExpiredSignatureError: + return TokenError.EXPIRED + except jwt.InvalidSignatureError: + return TokenError.INVALID_SIGNATURE + except jwt.PyJWTError: + return TokenError.MALFORMED diff --git a/backend/app/gateway/auth/local_provider.py b/backend/app/gateway/auth/local_provider.py new file mode 100644 index 000000000..fee810ad1 --- /dev/null +++ b/backend/app/gateway/auth/local_provider.py @@ -0,0 +1,104 @@ +"""Local email/password authentication provider.""" + +import logging + +from app.gateway.auth.models import User +from app.gateway.auth.password import hash_password_async, needs_rehash, verify_password_async +from app.gateway.auth.providers import AuthProvider +from app.gateway.auth.repositories.base import UserRepository + +logger = logging.getLogger(__name__) + + +class LocalAuthProvider(AuthProvider): + """Email/password authentication provider using local database.""" + + def __init__(self, repository: UserRepository): + """Initialize with a UserRepository. + + Args: + repository: UserRepository implementation (SQLite) + """ + self._repo = repository + + async def authenticate(self, credentials: dict) -> User | None: + """Authenticate with email and password. + + Args: + credentials: dict with 'email' and 'password' keys + + Returns: + User if authentication succeeds, None otherwise + """ + email = credentials.get("email") + password = credentials.get("password") + + if not email or not password: + return None + + user = await self._repo.get_user_by_email(email) + if user is None: + return None + + if user.password_hash is None: + # OAuth user without local password + return None + + if not await verify_password_async(password, user.password_hash): + return None + + if needs_rehash(user.password_hash): + try: + user.password_hash = await hash_password_async(password) + await self._repo.update_user(user) + except Exception: + # Rehash is an opportunistic upgrade; a transient DB error must not + # prevent an otherwise-valid login from succeeding. + logger.warning("Failed to rehash password for user %s; login will still succeed", user.email, exc_info=True) + + return user + + async def get_user(self, user_id: str) -> User | None: + """Get user by ID.""" + return await self._repo.get_user_by_id(user_id) + + async def create_user(self, email: str, password: str | None = None, system_role: str = "user", needs_setup: bool = False) -> User: + """Create a new local user. 
+
+        Args:
+            email: User email address
+            password: Plain text password (will be hashed)
+            system_role: Role to assign ("admin" or "user")
+            needs_setup: If True, user must complete setup on first login
+
+        Returns:
+            Created User instance
+        """
+        password_hash = await hash_password_async(password) if password else None
+        user = User(
+            email=email,
+            password_hash=password_hash,
+            system_role=system_role,
+            needs_setup=needs_setup,
+        )
+        return await self._repo.create_user(user)
+
+    async def get_user_by_oauth(self, provider: str, oauth_id: str) -> User | None:
+        """Get user by OAuth provider and ID."""
+        return await self._repo.get_user_by_oauth(provider, oauth_id)
+
+    async def count_users(self) -> int:
+        """Return total number of registered users."""
+        return await self._repo.count_users()
+
+    async def count_admin_users(self) -> int:
+        """Return number of admin users."""
+        return await self._repo.count_admin_users()
+
+    async def update_user(self, user: User) -> User:
+        """Update an existing user."""
+        return await self._repo.update_user(user)
+
+    async def get_user_by_email(self, email: str) -> User | None:
+        """Get user by email."""
+        return await self._repo.get_user_by_email(email)
diff --git a/backend/app/gateway/auth/models.py b/backend/app/gateway/auth/models.py
new file mode 100644
index 000000000..d8f9b954a
--- /dev/null
+++ b/backend/app/gateway/auth/models.py
@@ -0,0 +1,41 @@
+"""User Pydantic models for authentication."""
+
+from datetime import UTC, datetime
+from typing import Literal
+from uuid import UUID, uuid4
+
+from pydantic import BaseModel, ConfigDict, EmailStr, Field
+
+
+def _utc_now() -> datetime:
+    """Return current UTC time (timezone-aware)."""
+    return datetime.now(UTC)
+
+
+class User(BaseModel):
+    """Internal user representation."""
+
+    model_config = ConfigDict(from_attributes=True)
+
+    id: UUID = Field(default_factory=uuid4, description="Primary key")
+    email: EmailStr = Field(..., description="Unique email address")
+    password_hash: str | None = Field(None, description="bcrypt hash, nullable for OAuth users")
+    system_role: Literal["admin", "user"] = Field(default="user")
+    created_at: datetime = Field(default_factory=_utc_now)
+
+    # OAuth linkage (optional)
+    oauth_provider: str | None = Field(None, description="e.g. 'github', 'google'")
+    oauth_id: str | None = Field(None, description="User ID from OAuth provider")
+
+    # Auth lifecycle
+    needs_setup: bool = Field(default=False, description="True for auto-created admin until setup completes")
+    token_version: int = Field(default=0, description="Incremented on password change to invalidate old JWTs")
+
+
+class UserResponse(BaseModel):
+    """Response model for user info endpoint."""
+
+    id: str
+    email: str
+    system_role: Literal["admin", "user"]
+    needs_setup: bool = False
diff --git a/backend/app/gateway/auth/password.py b/backend/app/gateway/auth/password.py
new file mode 100644
index 000000000..551c1134e
--- /dev/null
+++ b/backend/app/gateway/auth/password.py
@@ -0,0 +1,81 @@
+"""Password hashing utilities with versioned hash format.
+
+Hash format: ``$dfv<n>$<bcrypt-hash>`` where ``<n>`` is the version.
+
+- **v1** (legacy): ``bcrypt(password)`` — plain bcrypt, susceptible to
+  72-byte silent truncation.
+- **v2** (current): ``bcrypt(b64(sha256(password)))`` — SHA-256 pre-hash
+  avoids the 72-byte truncation limit so the full password contributes
+  to the hash.
+
+Verification auto-detects the version and falls back to v1 for hashes
+without a prefix, so existing deployments upgrade transparently on next
+login.
+""" + +import asyncio +import base64 +import hashlib + +import bcrypt + +_CURRENT_VERSION = 2 +_PREFIX_V2 = "$dfv2$" +_PREFIX_V1 = "$dfv1$" + + +def _pre_hash_v2(password: str) -> bytes: + """SHA-256 pre-hash to bypass bcrypt's 72-byte limit.""" + return base64.b64encode(hashlib.sha256(password.encode("utf-8")).digest()) + + +def hash_password(password: str) -> str: + """Hash a password (current version: v2 — SHA-256 + bcrypt).""" + raw = bcrypt.hashpw(_pre_hash_v2(password), bcrypt.gensalt()).decode("utf-8") + return f"{_PREFIX_V2}{raw}" + + +def verify_password(plain_password: str, hashed_password: str) -> bool: + """Verify a password, auto-detecting the hash version. + + Accepts v2 (``$dfv2$…``), v1 (``$dfv1$…``), and bare bcrypt hashes + (treated as v1 for backward compatibility with pre-versioning data). + """ + try: + if hashed_password.startswith(_PREFIX_V2): + bcrypt_hash = hashed_password[len(_PREFIX_V2) :] + return bcrypt.checkpw(_pre_hash_v2(plain_password), bcrypt_hash.encode("utf-8")) + + if hashed_password.startswith(_PREFIX_V1): + bcrypt_hash = hashed_password[len(_PREFIX_V1) :] + else: + bcrypt_hash = hashed_password + + return bcrypt.checkpw(plain_password.encode("utf-8"), bcrypt_hash.encode("utf-8")) + except ValueError: + # bcrypt raises ValueError for malformed or corrupt hashes (e.g., invalid salt). + # Fail closed rather than crashing the request. + return False + + +def needs_rehash(hashed_password: str) -> bool: + """Return True if the hash uses an older version and should be rehashed.""" + return not hashed_password.startswith(_PREFIX_V2) + + +async def hash_password_async(password: str) -> str: + """Hash a password using bcrypt (non-blocking). + + Wraps the blocking bcrypt operation in a thread pool to avoid + blocking the event loop during password hashing. + """ + return await asyncio.to_thread(hash_password, password) + + +async def verify_password_async(plain_password: str, hashed_password: str) -> bool: + """Verify a password against its hash (non-blocking). + + Wraps the blocking bcrypt operation in a thread pool to avoid + blocking the event loop during password verification. + """ + return await asyncio.to_thread(verify_password, plain_password, hashed_password) diff --git a/backend/app/gateway/auth/providers.py b/backend/app/gateway/auth/providers.py new file mode 100644 index 000000000..95571d5d0 --- /dev/null +++ b/backend/app/gateway/auth/providers.py @@ -0,0 +1,24 @@ +"""Auth provider abstraction.""" + +from abc import ABC, abstractmethod + + +class AuthProvider(ABC): + """Abstract base class for authentication providers.""" + + @abstractmethod + async def authenticate(self, credentials: dict) -> "User | None": + """Authenticate user with given credentials. + + Returns User if authentication succeeds, None otherwise. 
+ """ + raise NotImplementedError + + @abstractmethod + async def get_user(self, user_id: str) -> "User | None": + """Retrieve user by ID.""" + raise NotImplementedError + + +# Import User at runtime to avoid circular imports +from app.gateway.auth.models import User # noqa: E402 diff --git a/backend/app/gateway/auth/repositories/__init__.py b/backend/app/gateway/auth/repositories/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/app/gateway/auth/repositories/base.py b/backend/app/gateway/auth/repositories/base.py new file mode 100644 index 000000000..b5baa02c7 --- /dev/null +++ b/backend/app/gateway/auth/repositories/base.py @@ -0,0 +1,102 @@ +"""User repository interface for abstracting database operations.""" + +from abc import ABC, abstractmethod + +from app.gateway.auth.models import User + + +class UserNotFoundError(LookupError): + """Raised when a user repository operation targets a non-existent row. + + Subclass of :class:`LookupError` so callers that already catch + ``LookupError`` for "missing entity" can keep working unchanged, + while specific call sites can pin to this class to distinguish + "concurrent delete during update" from other lookups. + """ + + +class UserRepository(ABC): + """Abstract interface for user data storage. + + Implement this interface to support different storage backends + (SQLite) + """ + + @abstractmethod + async def create_user(self, user: User) -> User: + """Create a new user. + + Args: + user: User object to create + + Returns: + Created User with ID assigned + + Raises: + ValueError: If email already exists + """ + raise NotImplementedError + + @abstractmethod + async def get_user_by_id(self, user_id: str) -> User | None: + """Get user by ID. + + Args: + user_id: User UUID as string + + Returns: + User if found, None otherwise + """ + raise NotImplementedError + + @abstractmethod + async def get_user_by_email(self, email: str) -> User | None: + """Get user by email. + + Args: + email: User email address + + Returns: + User if found, None otherwise + """ + raise NotImplementedError + + @abstractmethod + async def update_user(self, user: User) -> User: + """Update an existing user. + + Args: + user: User object with updated fields + + Returns: + Updated User + + Raises: + UserNotFoundError: If no row exists for ``user.id``. This is + a hard failure (not a no-op) so callers cannot mistake a + concurrent-delete race for a successful update. + """ + raise NotImplementedError + + @abstractmethod + async def count_users(self) -> int: + """Return total number of registered users.""" + raise NotImplementedError + + @abstractmethod + async def count_admin_users(self) -> int: + """Return number of users with system_role == 'admin'.""" + raise NotImplementedError + + @abstractmethod + async def get_user_by_oauth(self, provider: str, oauth_id: str) -> User | None: + """Get user by OAuth provider and ID. + + Args: + provider: OAuth provider name (e.g. 'github', 'google') + oauth_id: User ID from the OAuth provider + + Returns: + User if found, None otherwise + """ + raise NotImplementedError diff --git a/backend/app/gateway/auth/repositories/sqlite.py b/backend/app/gateway/auth/repositories/sqlite.py new file mode 100644 index 000000000..3ee3978e3 --- /dev/null +++ b/backend/app/gateway/auth/repositories/sqlite.py @@ -0,0 +1,127 @@ +"""SQLAlchemy-backed UserRepository implementation. 
+ +Uses the shared async session factory from +``deerflow.persistence.engine`` — the ``users`` table lives in the +same database as ``threads_meta``, ``runs``, ``run_events``, and +``feedback``. + +Constructor takes the session factory directly (same pattern as the +other four repositories in ``deerflow.persistence.*``). Callers +construct this after ``init_engine_from_config()`` has run. +""" + +from __future__ import annotations + +from datetime import UTC +from uuid import UUID + +from sqlalchemy import func, select +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker + +from app.gateway.auth.models import User +from app.gateway.auth.repositories.base import UserNotFoundError, UserRepository +from deerflow.persistence.user.model import UserRow + + +class SQLiteUserRepository(UserRepository): + """Async user repository backed by the shared SQLAlchemy engine.""" + + def __init__(self, session_factory: async_sessionmaker[AsyncSession]) -> None: + self._sf = session_factory + + # ── Converters ──────────────────────────────────────────────────── + + @staticmethod + def _row_to_user(row: UserRow) -> User: + return User( + id=UUID(row.id), + email=row.email, + password_hash=row.password_hash, + system_role=row.system_role, # type: ignore[arg-type] + # SQLite loses tzinfo on read; reattach UTC so downstream + # code can compare timestamps reliably. + created_at=row.created_at if row.created_at.tzinfo else row.created_at.replace(tzinfo=UTC), + oauth_provider=row.oauth_provider, + oauth_id=row.oauth_id, + needs_setup=row.needs_setup, + token_version=row.token_version, + ) + + @staticmethod + def _user_to_row(user: User) -> UserRow: + return UserRow( + id=str(user.id), + email=user.email, + password_hash=user.password_hash, + system_role=user.system_role, + created_at=user.created_at, + oauth_provider=user.oauth_provider, + oauth_id=user.oauth_id, + needs_setup=user.needs_setup, + token_version=user.token_version, + ) + + # ── CRUD ────────────────────────────────────────────────────────── + + async def create_user(self, user: User) -> User: + """Insert a new user. Raises ``ValueError`` on duplicate email.""" + row = self._user_to_row(user) + async with self._sf() as session: + session.add(row) + try: + await session.commit() + except IntegrityError as exc: + await session.rollback() + raise ValueError(f"Email already registered: {user.email}") from exc + return user + + async def get_user_by_id(self, user_id: str) -> User | None: + async with self._sf() as session: + row = await session.get(UserRow, user_id) + return self._row_to_user(row) if row is not None else None + + async def get_user_by_email(self, email: str) -> User | None: + stmt = select(UserRow).where(UserRow.email == email) + async with self._sf() as session: + result = await session.execute(stmt) + row = result.scalar_one_or_none() + return self._row_to_user(row) if row is not None else None + + async def update_user(self, user: User) -> User: + async with self._sf() as session: + row = await session.get(UserRow, str(user.id)) + if row is None: + # Hard fail on concurrent delete: callers (reset_admin, + # password change handlers, _ensure_admin_user) all + # fetched the user just before this call, so a missing + # row here means the row vanished underneath us. Silent + # success would let the caller log "password reset" for + # a row that no longer exists. 
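The `raise` that follows pushes the concurrent-delete race to the caller. A hedged sketch of what caller-side handling can look like (the `change_password` wrapper is hypothetical, not part of this patch):

```python
from app.gateway.auth.repositories.base import UserNotFoundError


async def change_password(repo, user, new_hash: str) -> bool:
    """Hypothetical caller: report failure instead of logging a bogus success."""
    user.password_hash = new_hash
    user.token_version += 1  # invalidate existing sessions
    try:
        await repo.update_user(user)
    except UserNotFoundError:
        # The row vanished between fetch and update.
        return False
    return True
```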
+ raise UserNotFoundError(f"User {user.id} no longer exists") + row.email = user.email + row.password_hash = user.password_hash + row.system_role = user.system_role + row.oauth_provider = user.oauth_provider + row.oauth_id = user.oauth_id + row.needs_setup = user.needs_setup + row.token_version = user.token_version + await session.commit() + return user + + async def count_users(self) -> int: + stmt = select(func.count()).select_from(UserRow) + async with self._sf() as session: + return await session.scalar(stmt) or 0 + + async def count_admin_users(self) -> int: + stmt = select(func.count()).select_from(UserRow).where(UserRow.system_role == "admin") + async with self._sf() as session: + return await session.scalar(stmt) or 0 + + async def get_user_by_oauth(self, provider: str, oauth_id: str) -> User | None: + stmt = select(UserRow).where(UserRow.oauth_provider == provider, UserRow.oauth_id == oauth_id) + async with self._sf() as session: + result = await session.execute(stmt) + row = result.scalar_one_or_none() + return self._row_to_user(row) if row is not None else None diff --git a/backend/app/gateway/auth/reset_admin.py b/backend/app/gateway/auth/reset_admin.py new file mode 100644 index 000000000..7b7da74d0 --- /dev/null +++ b/backend/app/gateway/auth/reset_admin.py @@ -0,0 +1,91 @@ +"""CLI tool to reset an admin password. + +Usage: + python -m app.gateway.auth.reset_admin + python -m app.gateway.auth.reset_admin --email admin@example.com + +Writes the new password to ``.deer-flow/admin_initial_credentials.txt`` +(mode 0600) instead of printing it, so CI / log aggregators never see +the cleartext secret. +""" + +from __future__ import annotations + +import argparse +import asyncio +import secrets +import sys + +from sqlalchemy import select + +from app.gateway.auth.credential_file import write_initial_credentials +from app.gateway.auth.password import hash_password +from app.gateway.auth.repositories.sqlite import SQLiteUserRepository +from deerflow.persistence.user.model import UserRow + + +async def _run(email: str | None) -> int: + from deerflow.config import get_app_config + from deerflow.persistence.engine import ( + close_engine, + get_session_factory, + init_engine_from_config, + ) + + config = get_app_config() + await init_engine_from_config(config.database) + try: + sf = get_session_factory() + if sf is None: + print("Error: persistence engine not available (check config.database).", file=sys.stderr) + return 1 + + repo = SQLiteUserRepository(sf) + + if email: + user = await repo.get_user_by_email(email) + else: + # Find first admin via direct SELECT — repository does not + # expose a "first admin" helper and we do not want to add + # one just for this CLI. 
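+ # Roughly the SQL this issues (illustrative):
+ #   SELECT * FROM users WHERE system_role = 'admin' LIMIT 1;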
+ async with sf() as session: + stmt = select(UserRow).where(UserRow.system_role == "admin").limit(1) + row = (await session.execute(stmt)).scalar_one_or_none() + if row is None: + user = None + else: + user = await repo.get_user_by_id(row.id) + + if user is None: + if email: + print(f"Error: user '{email}' not found.", file=sys.stderr) + else: + print("Error: no admin user found.", file=sys.stderr) + return 1 + + new_password = secrets.token_urlsafe(16) + user.password_hash = hash_password(new_password) + user.token_version += 1 + user.needs_setup = True + await repo.update_user(user) + + cred_path = write_initial_credentials(user.email, new_password, label="reset") + print(f"Password reset for: {user.email}") + print(f"Credentials written to: {cred_path} (mode 0600)") + print("Next login will require setup (new email + password).") + return 0 + finally: + await close_engine() + + +def main() -> None: + parser = argparse.ArgumentParser(description="Reset admin password") + parser.add_argument("--email", help="Admin email (default: first admin found)") + args = parser.parse_args() + + exit_code = asyncio.run(_run(args.email)) + sys.exit(exit_code) + + +if __name__ == "__main__": + main() diff --git a/backend/app/gateway/auth_middleware.py b/backend/app/gateway/auth_middleware.py new file mode 100644 index 000000000..6b6452264 --- /dev/null +++ b/backend/app/gateway/auth_middleware.py @@ -0,0 +1,126 @@ +"""Global authentication middleware — fail-closed safety net. + +Rejects unauthenticated requests to non-public paths with 401. When a +request passes the cookie check, resolves the JWT payload to a real +``User`` object and stamps it into both ``request.state.user`` and the +``deerflow.runtime.user_context`` contextvar so that repository-layer +owner filtering works automatically via the sentinel pattern. + +Fine-grained permission checks remain in authz.py decorators. +""" + +from collections.abc import Callable + +from fastapi import HTTPException, Request, Response +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.responses import JSONResponse +from starlette.types import ASGIApp + +from app.gateway.auth.errors import AuthErrorCode, AuthErrorResponse +from app.gateway.authz import _ALL_PERMISSIONS, AuthContext +from app.gateway.internal_auth import INTERNAL_AUTH_HEADER_NAME, get_internal_user, is_valid_internal_auth_token +from deerflow.runtime.user_context import reset_current_user, set_current_user + +# Paths that never require authentication. +_PUBLIC_PATH_PREFIXES: tuple[str, ...] = ( + "/health", + "/docs", + "/redoc", + "/openapi.json", +) + +# Exact auth paths that are public (login/register/status check). +# /api/v1/auth/me, /api/v1/auth/change-password etc. are NOT public. +_PUBLIC_EXACT_PATHS: frozenset[str] = frozenset( + { + "/api/v1/auth/login/local", + "/api/v1/auth/register", + "/api/v1/auth/logout", + "/api/v1/auth/setup-status", + "/api/v1/auth/initialize", + } +) + + +def _is_public(path: str) -> bool: + stripped = path.rstrip("/") + if stripped in _PUBLIC_EXACT_PATHS: + return True + return any(path.startswith(prefix) for prefix in _PUBLIC_PATH_PREFIXES) + + +class AuthMiddleware(BaseHTTPMiddleware): + """Strict auth gate: reject requests without a valid session. + + Two-stage check for non-public paths: + + 1. Cookie presence — return 401 NOT_AUTHENTICATED if missing + 2. 
JWT validation via ``get_current_user_from_request`` — return 401
+ with a fine-grained ``AuthErrorCode`` (token_expired, token_invalid,
+ user_not_found, …) if the token is malformed, expired, or the
+ signed user does not exist / is stale
+
+ On success, stamps ``request.state.user`` and the
+ ``deerflow.runtime.user_context`` contextvar so that repository-layer
+ owner filters work downstream without every route needing a
+ ``@require_auth`` decorator. Routes that need per-resource
+ authorization (e.g. "user A cannot read user B's thread by guessing
+ the URL") should additionally use ``@require_permission(...,
+ owner_check=True)`` for explicit enforcement — but authentication
+ itself is fully handled here.
+ """
+
+ def __init__(self, app: ASGIApp) -> None:
+ super().__init__(app)
+
+ async def dispatch(self, request: Request, call_next: Callable) -> Response:
+ if _is_public(request.url.path):
+ return await call_next(request)
+
+ internal_user = None
+ if is_valid_internal_auth_token(request.headers.get(INTERNAL_AUTH_HEADER_NAME)):
+ internal_user = get_internal_user()
+
+ # Non-public path: require session cookie
+ if internal_user is None and not request.cookies.get("access_token"):
+ return JSONResponse(
+ status_code=401,
+ content={
+ "detail": AuthErrorResponse(
+ code=AuthErrorCode.NOT_AUTHENTICATED,
+ message="Authentication required",
+ ).model_dump()
+ },
+ )
+
+ # Strict JWT validation: reject junk/expired tokens with 401
+ # right here instead of silently passing through. This closes
+ # the "junk cookie bypass" gap (AUTH_TEST_PLAN test 7.5.8):
+ # without this, non-isolation routes like /api/models would
+ # accept any cookie-shaped string as authentication.
+ #
+ # We call the *strict* resolver so that fine-grained error
+ # codes (token_expired, token_invalid, user_not_found, …)
+ # propagate from AuthErrorCode, not get flattened into one
+ # generic code. BaseHTTPMiddleware doesn't let HTTPException
+ # bubble up, so we catch and render it as JSONResponse here.
+ from app.gateway.deps import get_current_user_from_request
+
+ if internal_user is not None:
+ user = internal_user
+ else:
+ try:
+ user = await get_current_user_from_request(request)
+ except HTTPException as exc:
+ return JSONResponse(status_code=exc.status_code, content={"detail": exc.detail})
+
+ # Stamp both request.state.user (for the contextvar pattern)
+ # and request.state.auth (so @require_permission's "auth is
+ # None" branch short-circuits instead of running the entire
+ # JWT-decode + DB-lookup pipeline a second time per request).
+ request.state.user = user
+ request.state.auth = AuthContext(user=user, permissions=_ALL_PERMISSIONS)
+ token = set_current_user(user)
+ try:
+ return await call_next(request)
+ finally:
+ reset_current_user(token)
diff --git a/backend/app/gateway/authz.py b/backend/app/gateway/authz.py
new file mode 100644
index 000000000..c7cf63858
--- /dev/null
+++ b/backend/app/gateway/authz.py
@@ -0,0 +1,301 @@
+"""Authorization decorators and context for DeerFlow.
+
+Inspired by LangGraph Auth system: https://github.com/langchain-ai/langgraph/blob/main/libs/sdk-py/langgraph_sdk/auth/__init__.py
+
+**Usage:**
+
+1. Use ``@require_auth`` on routes that need authentication
+2. Use ``@require_permission("resource", "action", owner_check=...)`` for permission checks
+3. 
The decorator chain processes from bottom to top + +**Example:** + + @router.get("/{thread_id}") + @require_auth + @require_permission("threads", "read", owner_check=True) + async def get_thread(thread_id: str, request: Request): + # User is authenticated and has threads:read permission + ... + +**Permission Model:** + +- threads:read - View thread +- threads:write - Create/update thread +- threads:delete - Delete thread +- runs:create - Run agent +- runs:read - View run +- runs:cancel - Cancel run +""" + +from __future__ import annotations + +import functools +import inspect +from collections.abc import Callable +from types import SimpleNamespace +from typing import TYPE_CHECKING, Any, ParamSpec, TypeVar + +from fastapi import HTTPException, Request + +if TYPE_CHECKING: + from app.gateway.auth.models import User + +P = ParamSpec("P") +T = TypeVar("T") + + +# Permission constants +class Permissions: + """Permission constants for resource:action format.""" + + # Threads + THREADS_READ = "threads:read" + THREADS_WRITE = "threads:write" + THREADS_DELETE = "threads:delete" + + # Runs + RUNS_CREATE = "runs:create" + RUNS_READ = "runs:read" + RUNS_CANCEL = "runs:cancel" + + +class AuthContext: + """Authentication context for the current request. + + Stored in request.state.auth after require_auth decoration. + + Attributes: + user: The authenticated user, or None if anonymous + permissions: List of permission strings (e.g., "threads:read") + """ + + __slots__ = ("user", "permissions") + + def __init__(self, user: User | None = None, permissions: list[str] | None = None): + self.user = user + self.permissions = permissions or [] + + @property + def is_authenticated(self) -> bool: + """Check if user is authenticated.""" + return self.user is not None + + def has_permission(self, resource: str, action: str) -> bool: + """Check if context has permission for resource:action. + + Args: + resource: Resource name (e.g., "threads") + action: Action name (e.g., "read") + + Returns: + True if user has permission + """ + permission = f"{resource}:{action}" + return permission in self.permissions + + def require_user(self) -> User: + """Get user or raise 401. + + Raises: + HTTPException 401 if not authenticated + """ + if not self.user: + raise HTTPException(status_code=401, detail="Authentication required") + return self.user + + +def get_auth_context(request: Request) -> AuthContext | None: + """Get AuthContext from request state.""" + return getattr(request.state, "auth", None) + + +_ALL_PERMISSIONS: list[str] = [ + Permissions.THREADS_READ, + Permissions.THREADS_WRITE, + Permissions.THREADS_DELETE, + Permissions.RUNS_CREATE, + Permissions.RUNS_READ, + Permissions.RUNS_CANCEL, +] + + +def _make_test_request_stub() -> Any: + """Create a minimal request-like object for direct unit calls. + + Used when decorated route handlers are invoked without FastAPI's + request injection. Includes fields accessed by auth helpers. + """ + return SimpleNamespace(state=SimpleNamespace(), cookies={}, _deerflow_test_bypass_auth=True) + + +async def _authenticate(request: Request) -> AuthContext: + """Authenticate request and return AuthContext. + + Delegates to deps.get_optional_user_from_request() for the JWT→User pipeline. + Returns AuthContext with user=None for anonymous requests. 
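+
+ Illustrative outcomes (assuming the fixed default permission set):
+
+     anonymous / invalid cookie -> AuthContext(user=None, permissions=[])
+     valid session cookie       -> AuthContext(user=<User>, permissions=_ALL_PERMISSIONS)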
+ """ + from app.gateway.deps import get_optional_user_from_request + + user = await get_optional_user_from_request(request) + if user is None: + return AuthContext(user=None, permissions=[]) + + # In future, permissions could be stored in user record + return AuthContext(user=user, permissions=_ALL_PERMISSIONS) + + +def require_auth[**P, T](func: Callable[P, T]) -> Callable[P, T]: + """Decorator that authenticates the request and enforces authentication. + + Independently raises HTTP 401 for unauthenticated requests, regardless of + whether ``AuthMiddleware`` is present in the ASGI stack. Sets the resolved + ``AuthContext`` on ``request.state.auth`` for downstream handlers. + + Must be placed ABOVE other decorators (executes after them). + + Usage: + @router.get("/{thread_id}") + @require_auth # Bottom decorator (executes first after permission check) + @require_permission("threads", "read") + async def get_thread(thread_id: str, request: Request): + auth: AuthContext = request.state.auth + ... + + Raises: + HTTPException: 401 if the request is unauthenticated. + ValueError: If 'request' parameter is missing. + """ + + @functools.wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + request = kwargs.get("request") + if request is None: + # Unit tests may call decorated handlers directly without a + # FastAPI Request object. Inject a minimal request stub when + # the wrapped function declares `request`. + if "request" in inspect.signature(func).parameters: + kwargs["request"] = _make_test_request_stub() + else: + raise ValueError("require_auth decorator requires 'request' parameter") + request = kwargs["request"] + + if getattr(request, "_deerflow_test_bypass_auth", False): + return await func(*args, **kwargs) + + # Authenticate and set context + auth_context = await _authenticate(request) + request.state.auth = auth_context + + if not auth_context.is_authenticated: + raise HTTPException(status_code=401, detail="Authentication required") + + return await func(*args, **kwargs) + + return wrapper + + +def require_permission( + resource: str, + action: str, + owner_check: bool = False, + require_existing: bool = False, +) -> Callable[[Callable[P, T]], Callable[P, T]]: + """Decorator that checks permission for resource:action. + + Must be used AFTER @require_auth. + + Args: + resource: Resource name (e.g., "threads", "runs") + action: Action name (e.g., "read", "write", "delete") + owner_check: If True, validates that the current user owns the resource. + Requires 'thread_id' path parameter and performs ownership check. + require_existing: Only meaningful with ``owner_check=True``. If True, a + missing ``threads_meta`` row counts as a denial (404) + instead of "untracked legacy thread, allow". Use on + **destructive / mutating** routes (DELETE, PATCH, + state-update) so a deleted thread can't be re-targeted + by another user via the missing-row code path. + + Usage: + # Read-style: legacy untracked threads are allowed + @require_permission("threads", "read", owner_check=True) + async def get_thread(thread_id: str, request: Request): + ... + + # Destructive: thread row MUST exist and be owned by caller + @require_permission("threads", "delete", owner_check=True, require_existing=True) + async def delete_thread(thread_id: str, request: Request): + ... 
+ + Raises: + HTTPException 401: If authentication required but user is anonymous + HTTPException 403: If user lacks permission + HTTPException 404: If owner_check=True but user doesn't own the thread + ValueError: If owner_check=True but 'thread_id' parameter is missing + """ + + def decorator(func: Callable[P, T]) -> Callable[P, T]: + @functools.wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + request = kwargs.get("request") + if request is None: + # Unit tests may call decorated route handlers directly without + # constructing a FastAPI Request object. Inject a minimal stub + # when the wrapped function declares `request`. + if "request" in inspect.signature(func).parameters: + kwargs["request"] = _make_test_request_stub() + else: + return await func(*args, **kwargs) + request = kwargs["request"] + + if getattr(request, "_deerflow_test_bypass_auth", False): + return await func(*args, **kwargs) + + auth: AuthContext = getattr(request.state, "auth", None) + if auth is None: + auth = await _authenticate(request) + request.state.auth = auth + + if not auth.is_authenticated: + raise HTTPException(status_code=401, detail="Authentication required") + + # Check permission + if not auth.has_permission(resource, action): + raise HTTPException( + status_code=403, + detail=f"Permission denied: {resource}:{action}", + ) + + # Owner check for thread-specific resources. + # + # 2.0-rc moved thread metadata into the SQL persistence layer + # (``threads_meta`` table). We verify ownership via + # ``ThreadMetaStore.check_access``: it returns True for + # missing rows (untracked legacy thread) and for rows whose + # ``user_id`` is NULL (shared / pre-auth data), so this is + # strict-deny rather than strict-allow — only an *existing* + # row with a *different* user_id triggers 404. 
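+ # Decision table (sketch, require_existing=False):
+ #   no threads_meta row        -> allow (untracked legacy thread)
+ #   row with user_id = NULL    -> allow (shared / pre-auth data)
+ #   row owned by caller        -> allow
+ #   row owned by someone else  -> 404
+ # With require_existing=True the missing-row case becomes 404 too.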
+ if owner_check: + thread_id = kwargs.get("thread_id") + if thread_id is None: + raise ValueError("require_permission with owner_check=True requires 'thread_id' parameter") + + from app.gateway.deps import get_thread_store + + thread_store = get_thread_store(request) + allowed = await thread_store.check_access( + thread_id, + str(auth.user.id), + require_existing=require_existing, + ) + if not allowed: + raise HTTPException( + status_code=404, + detail=f"Thread {thread_id} not found", + ) + + return await func(*args, **kwargs) + + return wrapper + + return decorator diff --git a/backend/app/gateway/config.py b/backend/app/gateway/config.py index 66f1f2a48..95221dad2 100644 --- a/backend/app/gateway/config.py +++ b/backend/app/gateway/config.py @@ -9,6 +9,7 @@ class GatewayConfig(BaseModel): host: str = Field(default="0.0.0.0", description="Host to bind the gateway server") port: int = Field(default=8001, description="Port to bind the gateway server") cors_origins: list[str] = Field(default_factory=lambda: ["http://localhost:3000"], description="Allowed CORS origins") + enable_docs: bool = Field(default=True, description="Enable Swagger/ReDoc/OpenAPI endpoints") _gateway_config: GatewayConfig | None = None @@ -23,5 +24,6 @@ def get_gateway_config() -> GatewayConfig: host=os.getenv("GATEWAY_HOST", "0.0.0.0"), port=int(os.getenv("GATEWAY_PORT", "8001")), cors_origins=cors_origins_str.split(","), + enable_docs=os.getenv("GATEWAY_ENABLE_DOCS", "true").lower() == "true", ) return _gateway_config diff --git a/backend/app/gateway/csrf_middleware.py b/backend/app/gateway/csrf_middleware.py new file mode 100644 index 000000000..4c9b0f36a --- /dev/null +++ b/backend/app/gateway/csrf_middleware.py @@ -0,0 +1,113 @@ +"""CSRF protection middleware for FastAPI. + +Per RFC-001: +State-changing operations require CSRF protection. +""" + +import secrets +from collections.abc import Callable + +from fastapi import Request, Response +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.responses import JSONResponse +from starlette.types import ASGIApp + +CSRF_COOKIE_NAME = "csrf_token" +CSRF_HEADER_NAME = "X-CSRF-Token" +CSRF_TOKEN_LENGTH = 64 # bytes + + +def is_secure_request(request: Request) -> bool: + """Detect whether the original client request was made over HTTPS.""" + return request.headers.get("x-forwarded-proto", request.url.scheme) == "https" + + +def generate_csrf_token() -> str: + """Generate a secure random CSRF token.""" + return secrets.token_urlsafe(CSRF_TOKEN_LENGTH) + + +def should_check_csrf(request: Request) -> bool: + """Determine if a request needs CSRF validation. + + CSRF is checked for state-changing methods (POST, PUT, DELETE, PATCH). + GET, HEAD, OPTIONS, and TRACE are exempt per RFC 7231. + """ + if request.method not in ("POST", "PUT", "DELETE", "PATCH"): + return False + + path = request.url.path.rstrip("/") + # Exempt /api/v1/auth/me endpoint + if path == "/api/v1/auth/me": + return False + return True + + +_AUTH_EXEMPT_PATHS: frozenset[str] = frozenset( + { + "/api/v1/auth/login/local", + "/api/v1/auth/logout", + "/api/v1/auth/register", + "/api/v1/auth/initialize", + } +) + + +def is_auth_endpoint(request: Request) -> bool: + """Check if the request is to an auth endpoint. + + Auth endpoints don't need CSRF validation on first call (no token). 
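+
+ On a successful POST they receive a fresh CSRF cookie in the
+ response (see ``CSRFMiddleware.dispatch`` below), so subsequent
+ state-changing calls can pass the Double Submit check.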
+ """ + return request.url.path.rstrip("/") in _AUTH_EXEMPT_PATHS + + +class CSRFMiddleware(BaseHTTPMiddleware): + """Middleware that implements CSRF protection using Double Submit Cookie pattern.""" + + def __init__(self, app: ASGIApp) -> None: + super().__init__(app) + + async def dispatch(self, request: Request, call_next: Callable) -> Response: + _is_auth = is_auth_endpoint(request) + + if should_check_csrf(request) and not _is_auth: + cookie_token = request.cookies.get(CSRF_COOKIE_NAME) + header_token = request.headers.get(CSRF_HEADER_NAME) + + if not cookie_token or not header_token: + return JSONResponse( + status_code=403, + content={"detail": "CSRF token missing. Include X-CSRF-Token header."}, + ) + + if not secrets.compare_digest(cookie_token, header_token): + return JSONResponse( + status_code=403, + content={"detail": "CSRF token mismatch."}, + ) + + response = await call_next(request) + + # For auth endpoints that set up session, also set CSRF cookie + if _is_auth and request.method == "POST": + # Generate a new CSRF token for the session + csrf_token = generate_csrf_token() + is_https = is_secure_request(request) + response.set_cookie( + key=CSRF_COOKIE_NAME, + value=csrf_token, + httponly=False, # Must be JS-readable for Double Submit Cookie pattern + secure=is_https, + samesite="strict", + ) + + return response + + +def get_csrf_token(request: Request) -> str | None: + """Get the CSRF token from the current request's cookies. + + This is useful for server-side rendering where you need to embed + token in forms or headers. + """ + return request.cookies.get(CSRF_COOKIE_NAME) diff --git a/backend/app/gateway/deps.py b/backend/app/gateway/deps.py index 115868331..96ea7c5ea 100644 --- a/backend/app/gateway/deps.py +++ b/backend/app/gateway/deps.py @@ -8,12 +8,34 @@ Initialization is handled directly in ``app.py`` via :class:`AsyncExitStack`. 
from __future__ import annotations -from collections.abc import AsyncGenerator +from collections.abc import AsyncGenerator, Callable from contextlib import AsyncExitStack, asynccontextmanager +from typing import TYPE_CHECKING, TypeVar, cast from fastapi import FastAPI, HTTPException, Request +from langgraph.types import Checkpointer -from deerflow.runtime import RunManager, StreamBridge +from deerflow.config.app_config import AppConfig +from deerflow.persistence.feedback import FeedbackRepository +from deerflow.runtime import RunContext, RunManager, StreamBridge +from deerflow.runtime.events.store.base import RunEventStore +from deerflow.runtime.runs.store.base import RunStore + +if TYPE_CHECKING: + from app.gateway.auth.local_provider import LocalAuthProvider + from app.gateway.auth.repositories.sqlite import SQLiteUserRepository + from deerflow.persistence.thread_meta.base import ThreadMetaStore + + +T = TypeVar("T") + + +def get_config(request: Request) -> AppConfig: + """Return the app-scoped ``AppConfig`` stored on ``app.state``.""" + config = getattr(request.app.state, "config", None) + if config is None: + raise HTTPException(status_code=503, detail="Configuration not available") + return config @asynccontextmanager @@ -25,15 +47,54 @@ async def langgraph_runtime(app: FastAPI) -> AsyncGenerator[None, None]: async with langgraph_runtime(app): yield """ - from deerflow.agents.checkpointer.async_provider import make_checkpointer + from deerflow.persistence.engine import close_engine, get_session_factory, init_engine_from_config from deerflow.runtime import make_store, make_stream_bridge + from deerflow.runtime.checkpointer.async_provider import make_checkpointer + from deerflow.runtime.events.store import make_run_event_store async with AsyncExitStack() as stack: - app.state.stream_bridge = await stack.enter_async_context(make_stream_bridge()) - app.state.checkpointer = await stack.enter_async_context(make_checkpointer()) - app.state.store = await stack.enter_async_context(make_store()) - app.state.run_manager = RunManager() - yield + config = getattr(app.state, "config", None) + if config is None: + raise RuntimeError("langgraph_runtime() requires app.state.config to be initialized") + + app.state.stream_bridge = await stack.enter_async_context(make_stream_bridge(config)) + + # Initialize persistence engine BEFORE checkpointer so that + # auto-create-database logic runs first (postgres backend). + await init_engine_from_config(config.database) + + app.state.checkpointer = await stack.enter_async_context(make_checkpointer(config)) + app.state.store = await stack.enter_async_context(make_store(config)) + + # Initialize repositories — one get_session_factory() call for all. 
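+ # sf is None when no SQL backend is configured; each branch below
+ # therefore falls back to an in-memory (or disabled) implementation.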
+ sf = get_session_factory() + if sf is not None: + from deerflow.persistence.feedback import FeedbackRepository + from deerflow.persistence.run import RunRepository + + app.state.run_store = RunRepository(sf) + app.state.feedback_repo = FeedbackRepository(sf) + else: + from deerflow.runtime.runs.store.memory import MemoryRunStore + + app.state.run_store = MemoryRunStore() + app.state.feedback_repo = None + + from deerflow.persistence.thread_meta import make_thread_store + + app.state.thread_store = make_thread_store(sf, app.state.store) + + # Run event store (has its own factory with config-driven backend selection) + run_events_config = getattr(config, "run_events", None) + app.state.run_event_store = make_run_event_store(run_events_config) + + # RunManager with store backing for persistence + app.state.run_manager = RunManager(store=app.state.run_store) + + try: + yield + finally: + await close_engine() # --------------------------------------------------------------------------- @@ -41,30 +102,144 @@ async def langgraph_runtime(app: FastAPI) -> AsyncGenerator[None, None]: # --------------------------------------------------------------------------- -def get_stream_bridge(request: Request) -> StreamBridge: - """Return the global :class:`StreamBridge`, or 503.""" - bridge = getattr(request.app.state, "stream_bridge", None) - if bridge is None: - raise HTTPException(status_code=503, detail="Stream bridge not available") - return bridge +def _require(attr: str, label: str) -> Callable[[Request], T]: + """Create a FastAPI dependency that returns ``app.state.`` or 503.""" + + def dep(request: Request) -> T: + val = getattr(request.app.state, attr, None) + if val is None: + raise HTTPException(status_code=503, detail=f"{label} not available") + return cast(T, val) + + dep.__name__ = dep.__qualname__ = f"get_{attr}" + return dep -def get_run_manager(request: Request) -> RunManager: - """Return the global :class:`RunManager`, or 503.""" - mgr = getattr(request.app.state, "run_manager", None) - if mgr is None: - raise HTTPException(status_code=503, detail="Run manager not available") - return mgr - - -def get_checkpointer(request: Request): - """Return the global checkpointer, or 503.""" - cp = getattr(request.app.state, "checkpointer", None) - if cp is None: - raise HTTPException(status_code=503, detail="Checkpointer not available") - return cp +get_stream_bridge: Callable[[Request], StreamBridge] = _require("stream_bridge", "Stream bridge") +get_run_manager: Callable[[Request], RunManager] = _require("run_manager", "Run manager") +get_checkpointer: Callable[[Request], Checkpointer] = _require("checkpointer", "Checkpointer") +get_run_event_store: Callable[[Request], RunEventStore] = _require("run_event_store", "Run event store") +get_feedback_repo: Callable[[Request], FeedbackRepository] = _require("feedback_repo", "Feedback") +get_run_store: Callable[[Request], RunStore] = _require("run_store", "Run store") def get_store(request: Request): """Return the global store (may be ``None`` if not configured).""" return getattr(request.app.state, "store", None) + + +def get_thread_store(request: Request) -> ThreadMetaStore: + """Return the thread metadata store (SQL or memory-backed).""" + val = getattr(request.app.state, "thread_store", None) + if val is None: + raise HTTPException(status_code=503, detail="Thread metadata store not available") + return val + + +def get_run_context(request: Request) -> RunContext: + """Build a :class:`RunContext` from ``app.state`` singletons. 
+ + Returns a *base* context with infrastructure dependencies. + """ + config = get_config(request) + return RunContext( + checkpointer=get_checkpointer(request), + store=get_store(request), + event_store=get_run_event_store(request), + run_events_config=getattr(config, "run_events", None), + thread_store=get_thread_store(request), + app_config=config, + ) + + +# --------------------------------------------------------------------------- +# Auth helpers (used by authz.py and auth middleware) +# --------------------------------------------------------------------------- + +# Cached singletons to avoid repeated instantiation per request +_cached_local_provider: LocalAuthProvider | None = None +_cached_repo: SQLiteUserRepository | None = None + + +def get_local_provider() -> LocalAuthProvider: + """Get or create the cached LocalAuthProvider singleton. + + Must be called after ``init_engine_from_config()`` — the shared + session factory is required to construct the user repository. + """ + global _cached_local_provider, _cached_repo + if _cached_repo is None: + from app.gateway.auth.repositories.sqlite import SQLiteUserRepository + from deerflow.persistence.engine import get_session_factory + + sf = get_session_factory() + if sf is None: + raise RuntimeError("get_local_provider() called before init_engine_from_config(); cannot access users table") + _cached_repo = SQLiteUserRepository(sf) + if _cached_local_provider is None: + from app.gateway.auth.local_provider import LocalAuthProvider + + _cached_local_provider = LocalAuthProvider(repository=_cached_repo) + return _cached_local_provider + + +async def get_current_user_from_request(request: Request): + """Get the current authenticated user from the request cookie. + + Raises HTTPException 401 if not authenticated. + """ + from app.gateway.auth import decode_token + from app.gateway.auth.errors import AuthErrorCode, AuthErrorResponse, TokenError, token_error_to_code + + access_token = request.cookies.get("access_token") + if not access_token: + raise HTTPException( + status_code=401, + detail=AuthErrorResponse(code=AuthErrorCode.NOT_AUTHENTICATED, message="Not authenticated").model_dump(), + ) + + payload = decode_token(access_token) + if isinstance(payload, TokenError): + raise HTTPException( + status_code=401, + detail=AuthErrorResponse(code=token_error_to_code(payload), message=f"Token error: {payload.value}").model_dump(), + ) + + provider = get_local_provider() + user = await provider.get_user(payload.sub) + if user is None: + raise HTTPException( + status_code=401, + detail=AuthErrorResponse(code=AuthErrorCode.USER_NOT_FOUND, message="User not found").model_dump(), + ) + + # Token version mismatch → password was changed, token is stale + if user.token_version != payload.ver: + raise HTTPException( + status_code=401, + detail=AuthErrorResponse(code=AuthErrorCode.TOKEN_INVALID, message="Token revoked (password changed)").model_dump(), + ) + + return user + + +async def get_optional_user_from_request(request: Request): + """Get optional authenticated user from request. + + Returns None if not authenticated. + """ + try: + return await get_current_user_from_request(request) + except HTTPException: + return None + + +async def get_current_user(request: Request) -> str | None: + """Extract user_id from request cookie, or None if not authenticated. + + Thin adapter that returns the string id for callers that only need + identification (e.g., ``feedback.py``). 
Full-user callers should use + ``get_current_user_from_request`` or ``get_optional_user_from_request``. + """ + user = await get_optional_user_from_request(request) + return str(user.id) if user else None diff --git a/backend/app/gateway/internal_auth.py b/backend/app/gateway/internal_auth.py new file mode 100644 index 000000000..b0380379b --- /dev/null +++ b/backend/app/gateway/internal_auth.py @@ -0,0 +1,26 @@ +"""Process-local authentication for Gateway internal callers.""" + +from __future__ import annotations + +import secrets +from types import SimpleNamespace + +from deerflow.runtime.user_context import DEFAULT_USER_ID + +INTERNAL_AUTH_HEADER_NAME = "X-DeerFlow-Internal-Token" +_INTERNAL_AUTH_TOKEN = secrets.token_urlsafe(32) + + +def create_internal_auth_headers() -> dict[str, str]: + """Return headers that authenticate same-process Gateway internal calls.""" + return {INTERNAL_AUTH_HEADER_NAME: _INTERNAL_AUTH_TOKEN} + + +def is_valid_internal_auth_token(token: str | None) -> bool: + """Return True when *token* matches the process-local internal token.""" + return bool(token) and secrets.compare_digest(token, _INTERNAL_AUTH_TOKEN) + + +def get_internal_user(): + """Return the synthetic user used for trusted internal channel calls.""" + return SimpleNamespace(id=DEFAULT_USER_ID, system_role="internal") diff --git a/backend/app/gateway/langgraph_auth.py b/backend/app/gateway/langgraph_auth.py new file mode 100644 index 000000000..38e020150 --- /dev/null +++ b/backend/app/gateway/langgraph_auth.py @@ -0,0 +1,106 @@ +"""LangGraph Server auth handler — shares JWT logic with Gateway. + +Loaded by LangGraph Server via langgraph.json ``auth.path``. +Reuses the same ``decode_token`` / ``get_auth_config`` as Gateway, +so both modes validate tokens with the same secret and rules. + +Two layers: + 1. @auth.authenticate — validates JWT cookie, extracts user_id, + and enforces CSRF on state-changing methods (POST/PUT/DELETE/PATCH) + 2. @auth.on — returns metadata filter so each user only sees own threads +""" + +import secrets + +from langgraph_sdk import Auth + +from app.gateway.auth.errors import TokenError +from app.gateway.auth.jwt import decode_token +from app.gateway.deps import get_local_provider + +auth = Auth() + +# Methods that require CSRF validation (state-changing per RFC 7231). +_CSRF_METHODS = frozenset({"POST", "PUT", "DELETE", "PATCH"}) + + +def _check_csrf(request) -> None: + """Enforce Double Submit Cookie CSRF check for state-changing requests. + + Mirrors Gateway's CSRFMiddleware logic so that LangGraph routes + proxied directly by nginx have the same CSRF protection. + """ + method = getattr(request, "method", "") or "" + if method.upper() not in _CSRF_METHODS: + return + + cookie_token = request.cookies.get("csrf_token") + header_token = request.headers.get("x-csrf-token") + + if not cookie_token or not header_token: + raise Auth.exceptions.HTTPException( + status_code=403, + detail="CSRF token missing. Include X-CSRF-Token header.", + ) + + if not secrets.compare_digest(cookie_token, header_token): + raise Auth.exceptions.HTTPException( + status_code=403, + detail="CSRF token mismatch.", + ) + + +@auth.authenticate +async def authenticate(request): + """Validate the session cookie, decode JWT, and check token_version. + + Same validation chain as Gateway's get_current_user_from_request: + cookie → decode JWT → DB lookup → token_version match + Also enforces CSRF on state-changing methods. 
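+
+ Returns ``payload.sub`` (the user id) as the LangGraph identity on
+ success; every failure path raises ``Auth.exceptions.HTTPException``
+ (403 for CSRF failures, 401 for the rest).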
+ """ + # CSRF check before authentication so forged cross-site requests + # are rejected early, even if the cookie carries a valid JWT. + _check_csrf(request) + + token = request.cookies.get("access_token") + if not token: + raise Auth.exceptions.HTTPException( + status_code=401, + detail="Not authenticated", + ) + + payload = decode_token(token) + if isinstance(payload, TokenError): + raise Auth.exceptions.HTTPException( + status_code=401, + detail="Invalid token", + ) + + user = await get_local_provider().get_user(payload.sub) + if user is None: + raise Auth.exceptions.HTTPException( + status_code=401, + detail="User not found", + ) + if user.token_version != payload.ver: + raise Auth.exceptions.HTTPException( + status_code=401, + detail="Token revoked (password changed)", + ) + + return payload.sub + + +@auth.on +async def add_owner_filter(ctx: Auth.types.AuthContext, value: dict): + """Inject user_id metadata on writes; filter by user_id on reads. + + Gateway stores thread ownership as ``metadata.user_id``. + This handler ensures LangGraph Server enforces the same isolation. + """ + # On create/update: stamp user_id into metadata + metadata = value.setdefault("metadata", {}) + metadata["user_id"] = ctx.user.identity + + # Return filter dict — LangGraph applies it to search/read/delete + return {"user_id": ctx.user.identity} diff --git a/backend/app/gateway/path_utils.py b/backend/app/gateway/path_utils.py index 4869c9404..ded348c78 100644 --- a/backend/app/gateway/path_utils.py +++ b/backend/app/gateway/path_utils.py @@ -5,6 +5,7 @@ from pathlib import Path from fastapi import HTTPException from deerflow.config.paths import get_paths +from deerflow.runtime.user_context import get_effective_user_id def resolve_thread_virtual_path(thread_id: str, virtual_path: str) -> Path: @@ -22,7 +23,7 @@ def resolve_thread_virtual_path(thread_id: str, virtual_path: str) -> Path: HTTPException: If the path is invalid or outside allowed directories. 
""" try: - return get_paths().resolve_virtual_path(thread_id, virtual_path) + return get_paths().resolve_virtual_path(thread_id, virtual_path, user_id=get_effective_user_id()) except ValueError as e: status = 403 if "traversal" in str(e) else 400 raise HTTPException(status_code=status, detail=str(e)) diff --git a/backend/app/gateway/routers/agents.py b/backend/app/gateway/routers/agents.py index 92002d75b..ff4476893 100644 --- a/backend/app/gateway/routers/agents.py +++ b/backend/app/gateway/routers/agents.py @@ -25,6 +25,7 @@ class AgentResponse(BaseModel): description: str = Field(default="", description="Agent description") model: str | None = Field(default=None, description="Optional model override") tool_groups: list[str] | None = Field(default=None, description="Optional tool group whitelist") + skills: list[str] | None = Field(default=None, description="Optional skill whitelist (None=all, []=none)") soul: str | None = Field(default=None, description="SOUL.md content") @@ -41,6 +42,7 @@ class AgentCreateRequest(BaseModel): description: str = Field(default="", description="Agent description") model: str | None = Field(default=None, description="Optional model override") tool_groups: list[str] | None = Field(default=None, description="Optional tool group whitelist") + skills: list[str] | None = Field(default=None, description="Optional skill whitelist (None=all enabled, []=none)") soul: str = Field(default="", description="SOUL.md content — agent personality and behavioral guardrails") @@ -50,6 +52,7 @@ class AgentUpdateRequest(BaseModel): description: str | None = Field(default=None, description="Updated description") model: str | None = Field(default=None, description="Updated model override") tool_groups: list[str] | None = Field(default=None, description="Updated tool group whitelist") + skills: list[str] | None = Field(default=None, description="Updated skill whitelist (None=all, []=none)") soul: str | None = Field(default=None, description="Updated SOUL.md content") @@ -94,6 +97,7 @@ def _agent_config_to_response(agent_cfg: AgentConfig, include_soul: bool = False description=agent_cfg.description, model=agent_cfg.model, tool_groups=agent_cfg.tool_groups, + skills=agent_cfg.skills, soul=soul, ) @@ -215,6 +219,8 @@ async def create_agent_endpoint(request: AgentCreateRequest) -> AgentResponse: config_data["model"] = request.model if request.tool_groups is not None: config_data["tool_groups"] = request.tool_groups + if request.skills is not None: + config_data["skills"] = request.skills config_file = agent_dir / "config.yaml" with open(config_file, "w", encoding="utf-8") as f: @@ -271,21 +277,32 @@ async def update_agent(name: str, request: AgentUpdateRequest) -> AgentResponse: try: # Update config if any config fields changed - config_changed = any(v is not None for v in [request.description, request.model, request.tool_groups]) + # Use model_fields_set to distinguish "field omitted" from "explicitly set to null". + # This is critical for skills where None means "inherit all" (not "don't change"). 
+ fields_set = request.model_fields_set + config_changed = bool(fields_set & {"description", "model", "tool_groups", "skills"}) if config_changed: updated: dict = { "name": agent_cfg.name, - "description": request.description if request.description is not None else agent_cfg.description, + "description": request.description if "description" in fields_set else agent_cfg.description, } - new_model = request.model if request.model is not None else agent_cfg.model + new_model = request.model if "model" in fields_set else agent_cfg.model if new_model is not None: updated["model"] = new_model - new_tool_groups = request.tool_groups if request.tool_groups is not None else agent_cfg.tool_groups + new_tool_groups = request.tool_groups if "tool_groups" in fields_set else agent_cfg.tool_groups if new_tool_groups is not None: updated["tool_groups"] = new_tool_groups + # skills: None = inherit all, [] = no skills, ["a","b"] = whitelist + if "skills" in fields_set: + new_skills = request.skills + else: + new_skills = agent_cfg.skills + if new_skills is not None: + updated["skills"] = new_skills + config_file = agent_dir / "config.yaml" with open(config_file, "w", encoding="utf-8") as f: yaml.dump(updated, f, default_flow_style=False, allow_unicode=True) diff --git a/backend/app/gateway/routers/artifacts.py b/backend/app/gateway/routers/artifacts.py index a58fd5c0b..78ea5fa00 100644 --- a/backend/app/gateway/routers/artifacts.py +++ b/backend/app/gateway/routers/artifacts.py @@ -7,6 +7,7 @@ from urllib.parse import quote from fastapi import APIRouter, HTTPException, Request from fastapi.responses import FileResponse, PlainTextResponse, Response +from app.gateway.authz import require_permission from app.gateway.path_utils import resolve_thread_virtual_path logger = logging.getLogger(__name__) @@ -81,6 +82,7 @@ def _extract_file_from_skill_archive(zip_path: Path, internal_path: str) -> byte summary="Get Artifact File", description="Retrieve an artifact file generated by the AI agent. Text and binary files can be viewed inline, while active web content is always downloaded.", ) +@require_permission("threads", "read", owner_check=True) async def get_artifact(thread_id: str, path: str, request: Request, download: bool = False) -> Response: """Get an artifact file by its path. 
diff --git a/backend/app/gateway/routers/auth.py b/backend/app/gateway/routers/auth.py new file mode 100644 index 000000000..3a41e13eb --- /dev/null +++ b/backend/app/gateway/routers/auth.py @@ -0,0 +1,493 @@ +"""Authentication endpoints.""" + +import logging +import os +import time +from ipaddress import ip_address, ip_network + +from fastapi import APIRouter, Depends, HTTPException, Request, Response, status +from fastapi.security import OAuth2PasswordRequestForm +from pydantic import BaseModel, EmailStr, Field, field_validator + +from app.gateway.auth import ( + UserResponse, + create_access_token, +) +from app.gateway.auth.config import get_auth_config +from app.gateway.auth.errors import AuthErrorCode, AuthErrorResponse +from app.gateway.csrf_middleware import is_secure_request +from app.gateway.deps import get_current_user_from_request, get_local_provider + +logger = logging.getLogger(__name__) + +router = APIRouter(prefix="/api/v1/auth", tags=["auth"]) + + +# ── Request/Response Models ────────────────────────────────────────────── + + +class LoginResponse(BaseModel): + """Response model for login — token only lives in HttpOnly cookie.""" + + expires_in: int # seconds + needs_setup: bool = False + + +# Top common-password blocklist. Drawn from the public SecLists "10k worst +# passwords" set, lowercased + length>=8 only (shorter ones already fail +# the min_length check). Kept tight on purpose: this is the **lower bound** +# defense, not a full HIBP / passlib check, and runs in-process per request. +_COMMON_PASSWORDS: frozenset[str] = frozenset( + { + "password", + "password1", + "password12", + "password123", + "password1234", + "12345678", + "123456789", + "1234567890", + "qwerty12", + "qwertyui", + "qwerty123", + "abc12345", + "abcd1234", + "iloveyou", + "letmein1", + "welcome1", + "welcome123", + "admin123", + "administrator", + "passw0rd", + "p@ssw0rd", + "monkey12", + "trustno1", + "sunshine", + "princess", + "football", + "baseball", + "superman", + "batman123", + "starwars", + "dragon123", + "master123", + "shadow12", + "michael1", + "jennifer", + "computer", + } +) + + +def _password_is_common(password: str) -> bool: + """Case-insensitive blocklist check. + + Lowercases the input so trivial mutations like ``Password`` / + ``PASSWORD`` are also rejected. Does not normalize digit substitutions + (``p@ssw0rd`` is included as a literal entry instead) — keeping the + rule cheap and predictable. + """ + return password.lower() in _COMMON_PASSWORDS + + +def _validate_strong_password(value: str) -> str: + """Pydantic field-validator body shared by Register + ChangePassword. + + Constraint = function, not type-level mixin. The two request models + have no "is-a" relationship; they only share the password-strength + rule. Lifting it into a free function lets each model bind it via + ``@field_validator(field_name)`` without inheritance gymnastics. 
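+
+ Example binding (the exact pattern used by the request models below):
+
+     _strong_password = field_validator("password")(
+         classmethod(lambda cls, v: _validate_strong_password(v))
+     )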
+ """ + if _password_is_common(value): + raise ValueError("Password is too common; choose a stronger password.") + return value + + +class RegisterRequest(BaseModel): + """Request model for user registration.""" + + email: EmailStr + password: str = Field(..., min_length=8) + + _strong_password = field_validator("password")(classmethod(lambda cls, v: _validate_strong_password(v))) + + +class ChangePasswordRequest(BaseModel): + """Request model for password change (also handles setup flow).""" + + current_password: str + new_password: str = Field(..., min_length=8) + new_email: EmailStr | None = None + + _strong_password = field_validator("new_password")(classmethod(lambda cls, v: _validate_strong_password(v))) + + +class MessageResponse(BaseModel): + """Generic message response.""" + + message: str + + +# ── Helpers ─────────────────────────────────────────────────────────────── + + +def _set_session_cookie(response: Response, token: str, request: Request) -> None: + """Set the access_token HttpOnly cookie on the response.""" + config = get_auth_config() + is_https = is_secure_request(request) + response.set_cookie( + key="access_token", + value=token, + httponly=True, + secure=is_https, + samesite="lax", + max_age=config.token_expiry_days * 24 * 3600 if is_https else None, + ) + + +# ── Rate Limiting ──────────────────────────────────────────────────────── +# In-process dict — not shared across workers. +# +# **Limitation**: with multi-worker deployments (e.g., gunicorn -w N), each +# worker maintains its own lockout table, so an attacker effectively gets +# N × _MAX_LOGIN_ATTEMPTS guesses before being locked out everywhere. For +# production multi-worker setups, replace this with a shared store (Redis, +# database-backed counter) to enforce a true per-IP limit. + +_MAX_LOGIN_ATTEMPTS = 5 +_LOCKOUT_SECONDS = 300 # 5 minutes + +# ip → (fail_count, lock_until_timestamp) +_login_attempts: dict[str, tuple[int, float]] = {} + + +def _trusted_proxies() -> list: + """Parse ``AUTH_TRUSTED_PROXIES`` env var into a list of ip_network objects. + + Comma-separated CIDR or single-IP entries. Empty / unset = no proxy is + trusted (direct mode). Invalid entries are skipped with a logger warning. + Read live so env-var overrides take effect immediately and tests can + ``monkeypatch.setenv`` without poking a module-level cache. + """ + raw = os.getenv("AUTH_TRUSTED_PROXIES", "").strip() + if not raw: + return [] + nets = [] + for entry in raw.split(","): + entry = entry.strip() + if not entry: + continue + try: + nets.append(ip_network(entry, strict=False)) + except ValueError: + logger.warning("AUTH_TRUSTED_PROXIES: ignoring invalid entry %r", entry) + return nets + + +def _get_client_ip(request: Request) -> str: + """Extract the real client IP for rate limiting. + + Trust model: + + - The TCP peer (``request.client.host``) is always the baseline. It is + whatever the kernel reports as the connecting socket — unforgeable + by the client itself. + - ``X-Real-IP`` is **only** honored if the TCP peer is in the + ``AUTH_TRUSTED_PROXIES`` allowlist (set via env var, comma-separated + CIDR or single IPs). When set, the gateway is assumed to be behind a + reverse proxy (nginx, Cloudflare, ALB, …) that overwrites + ``X-Real-IP`` with the original client address. + - With no ``AUTH_TRUSTED_PROXIES`` set, ``X-Real-IP`` is silently + ignored — closing the bypass where any client could rotate the + header to dodge per-IP rate limits in dev / direct-gateway mode. 
+ + ``X-Forwarded-For`` is intentionally NOT used because it is naturally + client-controlled at the *first* hop and the trust chain is harder to + audit per-request. + """ + peer_host = request.client.host if request.client else None + + trusted = _trusted_proxies() + if trusted and peer_host: + try: + peer_ip = ip_address(peer_host) + if any(peer_ip in net for net in trusted): + real_ip = request.headers.get("x-real-ip", "").strip() + if real_ip: + return real_ip + except ValueError: + # peer_host wasn't a parseable IP (e.g. "unknown") — fall through + pass + + return peer_host or "unknown" + + +def _check_rate_limit(ip: str) -> None: + """Raise 429 if the IP is currently locked out.""" + record = _login_attempts.get(ip) + if record is None: + return + fail_count, lock_until = record + if fail_count >= _MAX_LOGIN_ATTEMPTS: + if time.time() < lock_until: + raise HTTPException( + status_code=429, + detail="Too many login attempts. Try again later.", + ) + del _login_attempts[ip] + + +_MAX_TRACKED_IPS = 10000 + + +def _record_login_failure(ip: str) -> None: + """Record a failed login attempt for the given IP.""" + # Evict expired lockouts when dict grows too large + if len(_login_attempts) >= _MAX_TRACKED_IPS: + now = time.time() + expired = [k for k, (c, t) in _login_attempts.items() if c >= _MAX_LOGIN_ATTEMPTS and now >= t] + for k in expired: + del _login_attempts[k] + # If still too large, evict cheapest-to-lose half: below-threshold + # IPs (lock_until=0.0) sort first, then earliest-expiring lockouts. + if len(_login_attempts) >= _MAX_TRACKED_IPS: + by_time = sorted(_login_attempts.items(), key=lambda kv: kv[1][1]) + for k, _ in by_time[: len(by_time) // 2]: + del _login_attempts[k] + + record = _login_attempts.get(ip) + if record is None: + _login_attempts[ip] = (1, 0.0) + else: + new_count = record[0] + 1 + lock_until = time.time() + _LOCKOUT_SECONDS if new_count >= _MAX_LOGIN_ATTEMPTS else 0.0 + _login_attempts[ip] = (new_count, lock_until) + + +def _record_login_success(ip: str) -> None: + """Clear failure counter for the given IP on successful login.""" + _login_attempts.pop(ip, None) + + +# ── Endpoints ───────────────────────────────────────────────────────────── + + +@router.post("/login/local", response_model=LoginResponse) +async def login_local( + request: Request, + response: Response, + form_data: OAuth2PasswordRequestForm = Depends(), +): + """Local email/password login.""" + client_ip = _get_client_ip(request) + _check_rate_limit(client_ip) + + user = await get_local_provider().authenticate({"email": form_data.username, "password": form_data.password}) + + if user is None: + _record_login_failure(client_ip) + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=AuthErrorResponse(code=AuthErrorCode.INVALID_CREDENTIALS, message="Incorrect email or password").model_dump(), + ) + + _record_login_success(client_ip) + token = create_access_token(str(user.id), token_version=user.token_version) + _set_session_cookie(response, token, request) + + return LoginResponse( + expires_in=get_auth_config().token_expiry_days * 24 * 3600, + needs_setup=user.needs_setup, + ) + + +@router.post("/register", response_model=UserResponse, status_code=status.HTTP_201_CREATED) +async def register(request: Request, response: Response, body: RegisterRequest): + """Register a new user account (always 'user' role). + + Admin is auto-created on first boot. This endpoint creates regular users. + Auto-login by setting the session cookie. 
+ """ + try: + user = await get_local_provider().create_user(email=body.email, password=body.password, system_role="user") + except ValueError: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=AuthErrorResponse(code=AuthErrorCode.EMAIL_ALREADY_EXISTS, message="Email already registered").model_dump(), + ) + + token = create_access_token(str(user.id), token_version=user.token_version) + _set_session_cookie(response, token, request) + + return UserResponse(id=str(user.id), email=user.email, system_role=user.system_role) + + +@router.post("/logout", response_model=MessageResponse) +async def logout(request: Request, response: Response): + """Logout current user by clearing the cookie.""" + response.delete_cookie(key="access_token", secure=is_secure_request(request), samesite="lax") + return MessageResponse(message="Successfully logged out") + + +@router.post("/change-password", response_model=MessageResponse) +async def change_password(request: Request, response: Response, body: ChangePasswordRequest): + """Change password for the currently authenticated user. + + Also handles the first-boot setup flow: + - If new_email is provided, updates email (checks uniqueness) + - If user.needs_setup is True and new_email is given, clears needs_setup + - Always increments token_version to invalidate old sessions + - Re-issues session cookie with new token_version + """ + from app.gateway.auth.password import hash_password_async, verify_password_async + + user = await get_current_user_from_request(request) + + if user.password_hash is None: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=AuthErrorResponse(code=AuthErrorCode.INVALID_CREDENTIALS, message="OAuth users cannot change password").model_dump()) + + if not await verify_password_async(body.current_password, user.password_hash): + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=AuthErrorResponse(code=AuthErrorCode.INVALID_CREDENTIALS, message="Current password is incorrect").model_dump()) + + provider = get_local_provider() + + # Update email if provided + if body.new_email is not None: + existing = await provider.get_user_by_email(body.new_email) + if existing and str(existing.id) != str(user.id): + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=AuthErrorResponse(code=AuthErrorCode.EMAIL_ALREADY_EXISTS, message="Email already in use").model_dump()) + user.email = body.new_email + + # Update password + bump version + user.password_hash = await hash_password_async(body.new_password) + user.token_version += 1 + + # Clear setup flag if this is the setup flow + if user.needs_setup and body.new_email is not None: + user.needs_setup = False + + await provider.update_user(user) + + # Re-issue cookie with new token_version + token = create_access_token(str(user.id), token_version=user.token_version) + _set_session_cookie(response, token, request) + + return MessageResponse(message="Password changed successfully") + + +@router.get("/me", response_model=UserResponse) +async def get_me(request: Request): + """Get current authenticated user info.""" + user = await get_current_user_from_request(request) + return UserResponse(id=str(user.id), email=user.email, system_role=user.system_role, needs_setup=user.needs_setup) + + +_SETUP_STATUS_COOLDOWN: dict[str, float] = {} +_SETUP_STATUS_COOLDOWN_SECONDS = 60 +_MAX_TRACKED_SETUP_STATUS_IPS = 10000 + + +@router.get("/setup-status") +async def setup_status(request: Request): + """Check if an admin account exists. 
Returns needs_setup=True when no admin exists.""" + client_ip = _get_client_ip(request) + now = time.time() + last_check = _SETUP_STATUS_COOLDOWN.get(client_ip, 0) + elapsed = now - last_check + if elapsed < _SETUP_STATUS_COOLDOWN_SECONDS: + retry_after = max(1, int(_SETUP_STATUS_COOLDOWN_SECONDS - elapsed)) + raise HTTPException( + status_code=status.HTTP_429_TOO_MANY_REQUESTS, + detail="Setup status check is rate limited", + headers={"Retry-After": str(retry_after)}, + ) + # Evict stale entries when dict grows too large to bound memory usage. + if len(_SETUP_STATUS_COOLDOWN) >= _MAX_TRACKED_SETUP_STATUS_IPS: + cutoff = now - _SETUP_STATUS_COOLDOWN_SECONDS + stale = [k for k, t in _SETUP_STATUS_COOLDOWN.items() if t < cutoff] + for k in stale: + del _SETUP_STATUS_COOLDOWN[k] + # If still too large after evicting expired entries, remove oldest half. + if len(_SETUP_STATUS_COOLDOWN) >= _MAX_TRACKED_SETUP_STATUS_IPS: + by_time = sorted(_SETUP_STATUS_COOLDOWN.items(), key=lambda kv: kv[1]) + for k, _ in by_time[: len(by_time) // 2]: + del _SETUP_STATUS_COOLDOWN[k] + _SETUP_STATUS_COOLDOWN[client_ip] = now + admin_count = await get_local_provider().count_admin_users() + return {"needs_setup": admin_count == 0} + + +class InitializeAdminRequest(BaseModel): + """Request model for first-boot admin account creation.""" + + email: EmailStr + password: str = Field(..., min_length=8) + + _strong_password = field_validator("password")(classmethod(lambda cls, v: _validate_strong_password(v))) + + +@router.post("/initialize", response_model=UserResponse, status_code=status.HTTP_201_CREATED) +async def initialize_admin(request: Request, response: Response, body: InitializeAdminRequest): + """Create the first admin account on initial system setup. + + Only callable when no admin exists. Returns 409 Conflict if an admin + already exists. + + On success, the admin account is created with ``needs_setup=False`` and + the session cookie is set. + """ + admin_count = await get_local_provider().count_admin_users() + if admin_count > 0: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail=AuthErrorResponse(code=AuthErrorCode.SYSTEM_ALREADY_INITIALIZED, message="System already initialized").model_dump(), + ) + + try: + user = await get_local_provider().create_user(email=body.email, password=body.password, system_role="admin", needs_setup=False) + except ValueError: + # DB unique-constraint race: another concurrent request beat us. + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail=AuthErrorResponse(code=AuthErrorCode.SYSTEM_ALREADY_INITIALIZED, message="System already initialized").model_dump(), + ) + + token = create_access_token(str(user.id), token_version=user.token_version) + _set_session_cookie(response, token, request) + + return UserResponse(id=str(user.id), email=user.email, system_role=user.system_role) + + +# ── OAuth Endpoints (Future/Placeholder) ───────────────────────────────── + + +@router.get("/oauth/{provider}") +async def oauth_login(provider: str): + """Initiate OAuth login flow. + + Redirects to the OAuth provider's authorization URL. + Currently a placeholder - requires OAuth provider implementation. 
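+
+    A future implementation would presumably build the provider's
+    authorization URL and redirect, roughly as follows (hypothetical
+    sketch; ``authorize_url``, ``client_id``, and the ``state`` scheme
+    are not defined anywhere yet):
+
+    ```python
+    # NOT implemented; placeholder names only:
+    # url = f"{authorize_url}?client_id={client_id}&state={state}"
+    # return RedirectResponse(url)
+    ```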
+ """ + if provider not in ["github", "google"]: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Unsupported OAuth provider: {provider}", + ) + + raise HTTPException( + status_code=status.HTTP_501_NOT_IMPLEMENTED, + detail="OAuth login not yet implemented", + ) + + +@router.get("/callback/{provider}") +async def oauth_callback(provider: str, code: str, state: str): + """OAuth callback endpoint. + + Handles the OAuth provider's callback after user authorization. + Currently a placeholder. + """ + raise HTTPException( + status_code=status.HTTP_501_NOT_IMPLEMENTED, + detail="OAuth callback not yet implemented", + ) diff --git a/backend/app/gateway/routers/feedback.py b/backend/app/gateway/routers/feedback.py new file mode 100644 index 000000000..ca5c1d406 --- /dev/null +++ b/backend/app/gateway/routers/feedback.py @@ -0,0 +1,188 @@ +"""Feedback endpoints — create, list, stats, delete. + +Allows users to submit thumbs-up/down feedback on runs, +optionally scoped to a specific message. +""" + +from __future__ import annotations + +import logging +from typing import Any + +from fastapi import APIRouter, HTTPException, Request +from pydantic import BaseModel, Field + +from app.gateway.authz import require_permission +from app.gateway.deps import get_current_user, get_feedback_repo, get_run_store + +logger = logging.getLogger(__name__) +router = APIRouter(prefix="/api/threads", tags=["feedback"]) + + +# --------------------------------------------------------------------------- +# Request / response models +# --------------------------------------------------------------------------- + + +class FeedbackCreateRequest(BaseModel): + rating: int = Field(..., description="Feedback rating: +1 (positive) or -1 (negative)") + comment: str | None = Field(default=None, description="Optional text feedback") + message_id: str | None = Field(default=None, description="Optional: scope feedback to a specific message") + + +class FeedbackUpsertRequest(BaseModel): + rating: int = Field(..., description="Feedback rating: +1 (positive) or -1 (negative)") + comment: str | None = Field(default=None, description="Optional text feedback") + + +class FeedbackResponse(BaseModel): + feedback_id: str + run_id: str + thread_id: str + user_id: str | None = None + message_id: str | None = None + rating: int + comment: str | None = None + created_at: str = "" + + +class FeedbackStatsResponse(BaseModel): + run_id: str + total: int = 0 + positive: int = 0 + negative: int = 0 + + +# --------------------------------------------------------------------------- +# Endpoints +# --------------------------------------------------------------------------- + + +@router.put("/{thread_id}/runs/{run_id}/feedback", response_model=FeedbackResponse) +@require_permission("threads", "write", owner_check=True, require_existing=True) +async def upsert_feedback( + thread_id: str, + run_id: str, + body: FeedbackUpsertRequest, + request: Request, +) -> dict[str, Any]: + """Create or update feedback for a run (idempotent).""" + if body.rating not in (1, -1): + raise HTTPException(status_code=400, detail="rating must be +1 or -1") + + user_id = await get_current_user(request) + + run_store = get_run_store(request) + run = await run_store.get(run_id) + if run is None: + raise HTTPException(status_code=404, detail=f"Run {run_id} not found") + if run.get("thread_id") != thread_id: + raise HTTPException(status_code=404, detail=f"Run {run_id} not found in thread {thread_id}") + + feedback_repo = get_feedback_repo(request) + return 
await feedback_repo.upsert( + run_id=run_id, + thread_id=thread_id, + rating=body.rating, + user_id=user_id, + comment=body.comment, + ) + + +@router.delete("/{thread_id}/runs/{run_id}/feedback") +@require_permission("threads", "delete", owner_check=True, require_existing=True) +async def delete_run_feedback( + thread_id: str, + run_id: str, + request: Request, +) -> dict[str, bool]: + """Delete the current user's feedback for a run.""" + user_id = await get_current_user(request) + feedback_repo = get_feedback_repo(request) + deleted = await feedback_repo.delete_by_run( + thread_id=thread_id, + run_id=run_id, + user_id=user_id, + ) + if not deleted: + raise HTTPException(status_code=404, detail="No feedback found for this run") + return {"success": True} + + +@router.post("/{thread_id}/runs/{run_id}/feedback", response_model=FeedbackResponse) +@require_permission("threads", "write", owner_check=True, require_existing=True) +async def create_feedback( + thread_id: str, + run_id: str, + body: FeedbackCreateRequest, + request: Request, +) -> dict[str, Any]: + """Submit feedback (thumbs-up/down) for a run.""" + if body.rating not in (1, -1): + raise HTTPException(status_code=400, detail="rating must be +1 or -1") + + user_id = await get_current_user(request) + + # Validate run exists and belongs to thread + run_store = get_run_store(request) + run = await run_store.get(run_id) + if run is None: + raise HTTPException(status_code=404, detail=f"Run {run_id} not found") + if run.get("thread_id") != thread_id: + raise HTTPException(status_code=404, detail=f"Run {run_id} not found in thread {thread_id}") + + feedback_repo = get_feedback_repo(request) + return await feedback_repo.create( + run_id=run_id, + thread_id=thread_id, + rating=body.rating, + user_id=user_id, + message_id=body.message_id, + comment=body.comment, + ) + + +@router.get("/{thread_id}/runs/{run_id}/feedback", response_model=list[FeedbackResponse]) +@require_permission("threads", "read", owner_check=True) +async def list_feedback( + thread_id: str, + run_id: str, + request: Request, +) -> list[dict[str, Any]]: + """List all feedback for a run.""" + feedback_repo = get_feedback_repo(request) + return await feedback_repo.list_by_run(thread_id, run_id) + + +@router.get("/{thread_id}/runs/{run_id}/feedback/stats", response_model=FeedbackStatsResponse) +@require_permission("threads", "read", owner_check=True) +async def feedback_stats( + thread_id: str, + run_id: str, + request: Request, +) -> dict[str, Any]: + """Get aggregated feedback stats (positive/negative counts) for a run.""" + feedback_repo = get_feedback_repo(request) + return await feedback_repo.aggregate_by_run(thread_id, run_id) + + +@router.delete("/{thread_id}/runs/{run_id}/feedback/{feedback_id}") +@require_permission("threads", "delete", owner_check=True, require_existing=True) +async def delete_feedback( + thread_id: str, + run_id: str, + feedback_id: str, + request: Request, +) -> dict[str, bool]: + """Delete a feedback record.""" + feedback_repo = get_feedback_repo(request) + # Verify feedback belongs to the specified thread/run before deleting + existing = await feedback_repo.get(feedback_id) + if existing is None: + raise HTTPException(status_code=404, detail=f"Feedback {feedback_id} not found") + if existing.get("thread_id") != thread_id or existing.get("run_id") != run_id: + raise HTTPException(status_code=404, detail=f"Feedback {feedback_id} not found in run {run_id}") + deleted = await feedback_repo.delete(feedback_id) + if not deleted: + raise 
HTTPException(status_code=404, detail=f"Feedback {feedback_id} not found") + return {"success": True} diff --git a/backend/app/gateway/routers/memory.py b/backend/app/gateway/routers/memory.py index 6ee546924..ca9e5f5e5 100644 --- a/backend/app/gateway/routers/memory.py +++ b/backend/app/gateway/routers/memory.py @@ -13,6 +13,7 @@ from deerflow.agents.memory.updater import ( update_memory_fact, ) from deerflow.config.memory_config import get_memory_config +from deerflow.runtime.user_context import get_effective_user_id router = APIRouter(prefix="/api", tags=["memory"]) @@ -147,7 +148,7 @@ async def get_memory() -> MemoryResponse: } ``` """ - memory_data = get_memory_data() + memory_data = get_memory_data(user_id=get_effective_user_id()) return MemoryResponse(**memory_data) @@ -167,7 +168,7 @@ async def reload_memory() -> MemoryResponse: Returns: The reloaded memory data. """ - memory_data = reload_memory_data() + memory_data = reload_memory_data(user_id=get_effective_user_id()) return MemoryResponse(**memory_data) @@ -181,7 +182,7 @@ async def reload_memory() -> MemoryResponse: async def clear_memory() -> MemoryResponse: """Clear all persisted memory data.""" try: - memory_data = clear_memory_data() + memory_data = clear_memory_data(user_id=get_effective_user_id()) except OSError as exc: raise HTTPException(status_code=500, detail="Failed to clear memory data.") from exc @@ -202,6 +203,7 @@ async def create_memory_fact_endpoint(request: FactCreateRequest) -> MemoryRespo content=request.content, category=request.category, confidence=request.confidence, + user_id=get_effective_user_id(), ) except ValueError as exc: raise _map_memory_fact_value_error(exc) from exc @@ -221,7 +223,7 @@ async def create_memory_fact_endpoint(request: FactCreateRequest) -> MemoryRespo async def delete_memory_fact_endpoint(fact_id: str) -> MemoryResponse: """Delete a single fact from memory by fact id.""" try: - memory_data = delete_memory_fact(fact_id) + memory_data = delete_memory_fact(fact_id, user_id=get_effective_user_id()) except KeyError as exc: raise HTTPException(status_code=404, detail=f"Memory fact '{fact_id}' not found.") from exc except OSError as exc: @@ -245,6 +247,7 @@ async def update_memory_fact_endpoint(fact_id: str, request: FactPatchRequest) - content=request.content, category=request.category, confidence=request.confidence, + user_id=get_effective_user_id(), ) except ValueError as exc: raise _map_memory_fact_value_error(exc) from exc @@ -265,7 +268,7 @@ async def update_memory_fact_endpoint(fact_id: str, request: FactPatchRequest) - ) async def export_memory() -> MemoryResponse: """Export the current memory data.""" - memory_data = get_memory_data() + memory_data = get_memory_data(user_id=get_effective_user_id()) return MemoryResponse(**memory_data) @@ -279,7 +282,7 @@ async def export_memory() -> MemoryResponse: async def import_memory(request: MemoryResponse) -> MemoryResponse: """Import and persist memory data.""" try: - memory_data = import_memory_data(request.model_dump()) + memory_data = import_memory_data(request.model_dump(), user_id=get_effective_user_id()) except OSError as exc: raise HTTPException(status_code=500, detail="Failed to import memory data.") from exc @@ -337,7 +340,7 @@ async def get_memory_status() -> MemoryStatusResponse: Combined memory configuration and current data. 
""" config = get_memory_config() - memory_data = get_memory_data() + memory_data = get_memory_data(user_id=get_effective_user_id()) return MemoryStatusResponse( config=MemoryConfigResponse( diff --git a/backend/app/gateway/routers/models.py b/backend/app/gateway/routers/models.py index 11a87a872..a36ece927 100644 --- a/backend/app/gateway/routers/models.py +++ b/backend/app/gateway/routers/models.py @@ -1,7 +1,8 @@ -from fastapi import APIRouter, HTTPException +from fastapi import APIRouter, Depends, HTTPException from pydantic import BaseModel, Field -from deerflow.config import get_app_config +from app.gateway.deps import get_config +from deerflow.config.app_config import AppConfig router = APIRouter(prefix="/api", tags=["models"]) @@ -36,7 +37,7 @@ class ModelsListResponse(BaseModel): summary="List All Models", description="Retrieve a list of all available AI models configured in the system.", ) -async def list_models() -> ModelsListResponse: +async def list_models(config: AppConfig = Depends(get_config)) -> ModelsListResponse: """List all available models from configuration. Returns model information suitable for frontend display, @@ -72,7 +73,6 @@ async def list_models() -> ModelsListResponse: } ``` """ - config = get_app_config() models = [ ModelResponse( name=model.name, @@ -96,7 +96,7 @@ async def list_models() -> ModelsListResponse: summary="Get Model Details", description="Retrieve detailed information about a specific AI model by its name.", ) -async def get_model(model_name: str) -> ModelResponse: +async def get_model(model_name: str, config: AppConfig = Depends(get_config)) -> ModelResponse: """Get a specific model by name. Args: @@ -118,7 +118,6 @@ async def get_model(model_name: str) -> ModelResponse: } ``` """ - config = get_app_config() model = config.get_model_config(model_name) if model is None: raise HTTPException(status_code=404, detail=f"Model '{model_name}' not found") diff --git a/backend/app/gateway/routers/runs.py b/backend/app/gateway/routers/runs.py index 7d17488fc..f2775466c 100644 --- a/backend/app/gateway/routers/runs.py +++ b/backend/app/gateway/routers/runs.py @@ -11,10 +11,11 @@ import asyncio import logging import uuid -from fastapi import APIRouter, Request +from fastapi import APIRouter, HTTPException, Query, Request from fastapi.responses import StreamingResponse -from app.gateway.deps import get_checkpointer, get_run_manager, get_stream_bridge +from app.gateway.authz import require_permission +from app.gateway.deps import get_checkpointer, get_feedback_repo, get_run_event_store, get_run_manager, get_run_store, get_stream_bridge from app.gateway.routers.thread_runs import RunCreateRequest from app.gateway.services import sse_consumer, start_run from deerflow.runtime import serialize_channel_values @@ -85,3 +86,58 @@ async def stateless_wait(body: RunCreateRequest, request: Request) -> dict: logger.exception("Failed to fetch final state for run %s", record.run_id) return {"status": record.status.value, "error": record.error} + + +# --------------------------------------------------------------------------- +# Run-scoped read endpoints +# --------------------------------------------------------------------------- + + +async def _resolve_run(run_id: str, request: Request) -> dict: + """Fetch run by run_id with user ownership check. 
Raises 404 if not found.""" + run_store = get_run_store(request) + record = await run_store.get(run_id) # user_id=AUTO filters by contextvar + if record is None: + raise HTTPException(status_code=404, detail=f"Run {run_id} not found") + return record + + +@router.get("/{run_id}/messages") +@require_permission("runs", "read") +async def run_messages( + run_id: str, + request: Request, + limit: int = Query(default=50, le=200, ge=1), + before_seq: int | None = Query(default=None), + after_seq: int | None = Query(default=None), +) -> dict: + """Return paginated messages for a run (cursor-based). + + Pagination: + - after_seq: messages with seq > after_seq (forward) + - before_seq: messages with seq < before_seq (backward) + - neither: latest messages + + Response: { data: [...], has_more: bool } + """ + run = await _resolve_run(run_id, request) + event_store = get_run_event_store(request) + rows = await event_store.list_messages_by_run( + run["thread_id"], + run_id, + limit=limit + 1, + before_seq=before_seq, + after_seq=after_seq, + ) + has_more = len(rows) > limit + data = rows[:limit] if has_more else rows + return {"data": data, "has_more": has_more} + + +@router.get("/{run_id}/feedback") +@require_permission("runs", "read") +async def run_feedback(run_id: str, request: Request) -> list[dict]: + """Return all feedback for a run.""" + run = await _resolve_run(run_id, request) + feedback_repo = get_feedback_repo(request) + return await feedback_repo.list_by_run(run["thread_id"], run_id) diff --git a/backend/app/gateway/routers/skills.py b/backend/app/gateway/routers/skills.py index 5fac32d41..78462ae09 100644 --- a/backend/app/gateway/routers/skills.py +++ b/backend/app/gateway/routers/skills.py @@ -1,30 +1,20 @@ -import errno import json import logging -import shutil from pathlib import Path -from fastapi import APIRouter, HTTPException +from fastapi import APIRouter, Depends, HTTPException from pydantic import BaseModel, Field +from app.gateway.deps import get_config from app.gateway.path_utils import resolve_thread_virtual_path from deerflow.agents.lead_agent.prompt import refresh_skills_system_prompt_cache_async +from deerflow.config.app_config import AppConfig from deerflow.config.extensions_config import ExtensionsConfig, SkillStateConfig, get_extensions_config, reload_extensions_config -from deerflow.skills import Skill, load_skills -from deerflow.skills.installer import SkillAlreadyExistsError, install_skill_from_archive -from deerflow.skills.manager import ( - append_history, - atomic_write, - custom_skill_exists, - ensure_custom_skill_is_editable, - get_custom_skill_dir, - get_custom_skill_file, - get_skill_history_file, - read_custom_skill_content, - read_history, - validate_skill_markdown_content, -) +from deerflow.skills import Skill +from deerflow.skills.installer import SkillAlreadyExistsError from deerflow.skills.security_scanner import scan_skill_content +from deerflow.skills.storage import get_or_new_skill_storage +from deerflow.skills.types import SKILL_MD_FILE, SkillCategory logger = logging.getLogger(__name__) @@ -37,7 +27,7 @@ class SkillResponse(BaseModel): name: str = Field(..., description="Name of the skill") description: str = Field(..., description="Description of what the skill does") license: str | None = Field(None, description="License information") - category: str = Field(..., description="Category of the skill (public or custom)") + category: SkillCategory = Field(..., description="Category of the skill (public or custom)") enabled: bool = 
Field(default=True, description="Whether this skill is enabled") @@ -101,9 +91,9 @@ def _skill_to_response(skill: Skill) -> SkillResponse: summary="List All Skills", description="Retrieve a list of all available skills from both public and custom directories.", ) -async def list_skills() -> SkillsListResponse: +async def list_skills(config: AppConfig = Depends(get_config)) -> SkillsListResponse: try: - skills = load_skills(enabled_only=False) + skills = get_or_new_skill_storage(app_config=config).load_skills(enabled_only=False) return SkillsListResponse(skills=[_skill_to_response(skill) for skill in skills]) except Exception as e: logger.error(f"Failed to load skills: {e}", exc_info=True) @@ -116,10 +106,10 @@ async def list_skills() -> SkillsListResponse: summary="Install Skill", description="Install a skill from a .skill file (ZIP archive) located in the thread's user-data directory.", ) -async def install_skill(request: SkillInstallRequest) -> SkillInstallResponse: +async def install_skill(request: SkillInstallRequest, config: AppConfig = Depends(get_config)) -> SkillInstallResponse: try: skill_file_path = resolve_thread_virtual_path(request.thread_id, request.path) - result = install_skill_from_archive(skill_file_path) + result = await get_or_new_skill_storage(app_config=config).ainstall_skill_from_archive(skill_file_path) await refresh_skills_system_prompt_cache_async() return SkillInstallResponse(**result) except FileNotFoundError as e: @@ -136,9 +126,9 @@ async def install_skill(request: SkillInstallRequest) -> SkillInstallResponse: @router.get("/skills/custom", response_model=SkillsListResponse, summary="List Custom Skills") -async def list_custom_skills() -> SkillsListResponse: +async def list_custom_skills(config: AppConfig = Depends(get_config)) -> SkillsListResponse: try: - skills = [skill for skill in load_skills(enabled_only=False) if skill.category == "custom"] + skills = [skill for skill in get_or_new_skill_storage(app_config=config).load_skills(enabled_only=False) if skill.category == SkillCategory.CUSTOM] return SkillsListResponse(skills=[_skill_to_response(skill) for skill in skills]) except Exception as e: logger.error("Failed to list custom skills: %s", e, exc_info=True) @@ -146,13 +136,14 @@ async def list_custom_skills() -> SkillsListResponse: @router.get("/skills/custom/{skill_name}", response_model=CustomSkillContentResponse, summary="Get Custom Skill Content") -async def get_custom_skill(skill_name: str) -> CustomSkillContentResponse: +async def get_custom_skill(skill_name: str, config: AppConfig = Depends(get_config)) -> CustomSkillContentResponse: try: - skills = load_skills(enabled_only=False) - skill = next((s for s in skills if s.name == skill_name and s.category == "custom"), None) + skill_name = skill_name.replace("\r\n", "").replace("\n", "") + skills = get_or_new_skill_storage(app_config=config).load_skills(enabled_only=False) + skill = next((s for s in skills if s.name == skill_name and s.category == SkillCategory.CUSTOM), None) if skill is None: raise HTTPException(status_code=404, detail=f"Custom skill '{skill_name}' not found") - return CustomSkillContentResponse(**_skill_to_response(skill).model_dump(), content=read_custom_skill_content(skill_name)) + return CustomSkillContentResponse(**_skill_to_response(skill).model_dump(), content=get_or_new_skill_storage(app_config=config).read_custom_skill(skill_name)) except HTTPException: raise except Exception as e: @@ -161,30 +152,31 @@ async def get_custom_skill(skill_name: str) -> 
CustomSkillContentResponse: @router.put("/skills/custom/{skill_name}", response_model=CustomSkillContentResponse, summary="Edit Custom Skill") -async def update_custom_skill(skill_name: str, request: CustomSkillUpdateRequest) -> CustomSkillContentResponse: +async def update_custom_skill(skill_name: str, request: CustomSkillUpdateRequest, config: AppConfig = Depends(get_config)) -> CustomSkillContentResponse: try: - ensure_custom_skill_is_editable(skill_name) - validate_skill_markdown_content(skill_name, request.content) - scan = await scan_skill_content(request.content, executable=False, location=f"{skill_name}/SKILL.md") + skill_name = skill_name.replace("\r\n", "").replace("\n", "") + storage = get_or_new_skill_storage(app_config=config) + storage.ensure_custom_skill_is_editable(skill_name) + storage.validate_skill_markdown_content(skill_name, request.content) + scan = await scan_skill_content(request.content, executable=False, location=f"{skill_name}/{SKILL_MD_FILE}", app_config=config) if scan.decision == "block": raise HTTPException(status_code=400, detail=f"Security scan blocked the edit: {scan.reason}") - skill_file = get_custom_skill_dir(skill_name) / "SKILL.md" - prev_content = skill_file.read_text(encoding="utf-8") - atomic_write(skill_file, request.content) - append_history( + prev_content = storage.read_custom_skill(skill_name) + storage.write_custom_skill(skill_name, SKILL_MD_FILE, request.content) + storage.append_history( skill_name, { "action": "human_edit", "author": "human", "thread_id": None, - "file_path": "SKILL.md", + "file_path": SKILL_MD_FILE, "prev_content": prev_content, "new_content": request.content, "scanner": {"decision": scan.decision, "reason": scan.reason}, }, ) await refresh_skills_system_prompt_cache_async() - return await get_custom_skill(skill_name) + return await get_custom_skill(skill_name, config) except HTTPException: raise except FileNotFoundError as e: @@ -197,29 +189,22 @@ async def update_custom_skill(skill_name: str, request: CustomSkillUpdateRequest @router.delete("/skills/custom/{skill_name}", summary="Delete Custom Skill") -async def delete_custom_skill(skill_name: str) -> dict[str, bool]: +async def delete_custom_skill(skill_name: str, config: AppConfig = Depends(get_config)) -> dict[str, bool]: try: - ensure_custom_skill_is_editable(skill_name) - skill_dir = get_custom_skill_dir(skill_name) - prev_content = read_custom_skill_content(skill_name) - try: - append_history( - skill_name, - { - "action": "human_delete", - "author": "human", - "thread_id": None, - "file_path": "SKILL.md", - "prev_content": prev_content, - "new_content": None, - "scanner": {"decision": "allow", "reason": "Deletion requested."}, - }, - ) - except OSError as e: - if not isinstance(e, PermissionError) and e.errno not in {errno.EACCES, errno.EPERM, errno.EROFS}: - raise - logger.warning("Skipping delete history write for custom skill %s due to readonly/permission failure; continuing with skill directory removal: %s", skill_name, e) - shutil.rmtree(skill_dir) + skill_name = skill_name.replace("\r\n", "").replace("\n", "") + storage = get_or_new_skill_storage(app_config=config) + storage.delete_custom_skill( + skill_name, + history_meta={ + "action": "human_delete", + "author": "human", + "thread_id": None, + "file_path": SKILL_MD_FILE, + "prev_content": None, + "new_content": None, + "scanner": {"decision": "allow", "reason": "Deletion requested."}, + }, + ) await refresh_skills_system_prompt_cache_async() return {"success": True} except FileNotFoundError as e: @@ 
-232,11 +217,13 @@ async def delete_custom_skill(skill_name: str) -> dict[str, bool]: @router.get("/skills/custom/{skill_name}/history", response_model=CustomSkillHistoryResponse, summary="Get Custom Skill History") -async def get_custom_skill_history(skill_name: str) -> CustomSkillHistoryResponse: +async def get_custom_skill_history(skill_name: str, config: AppConfig = Depends(get_config)) -> CustomSkillHistoryResponse: try: - if not custom_skill_exists(skill_name) and not get_skill_history_file(skill_name).exists(): + skill_name = skill_name.replace("\r\n", "").replace("\n", "") + storage = get_or_new_skill_storage(app_config=config) + if not storage.custom_skill_exists(skill_name) and not storage.get_skill_history_file(skill_name).exists(): raise HTTPException(status_code=404, detail=f"Custom skill '{skill_name}' not found") - return CustomSkillHistoryResponse(history=read_history(skill_name)) + return CustomSkillHistoryResponse(history=storage.read_history(skill_name)) except HTTPException: raise except Exception as e: @@ -245,38 +232,40 @@ async def get_custom_skill_history(skill_name: str) -> CustomSkillHistoryRespons @router.post("/skills/custom/{skill_name}/rollback", response_model=CustomSkillContentResponse, summary="Rollback Custom Skill") -async def rollback_custom_skill(skill_name: str, request: SkillRollbackRequest) -> CustomSkillContentResponse: +async def rollback_custom_skill(skill_name: str, request: SkillRollbackRequest, config: AppConfig = Depends(get_config)) -> CustomSkillContentResponse: try: - if not custom_skill_exists(skill_name) and not get_skill_history_file(skill_name).exists(): + skill_name = skill_name.replace("\r\n", "").replace("\n", "") + storage = get_or_new_skill_storage(app_config=config) + if not storage.custom_skill_exists(skill_name) and not storage.get_skill_history_file(skill_name).exists(): raise HTTPException(status_code=404, detail=f"Custom skill '{skill_name}' not found") - history = read_history(skill_name) + history = storage.read_history(skill_name) if not history: raise HTTPException(status_code=400, detail=f"Custom skill '{skill_name}' has no history") record = history[request.history_index] target_content = record.get("prev_content") if target_content is None: raise HTTPException(status_code=400, detail="Selected history entry has no previous content to roll back to") - validate_skill_markdown_content(skill_name, target_content) - scan = await scan_skill_content(target_content, executable=False, location=f"{skill_name}/SKILL.md") - skill_file = get_custom_skill_file(skill_name) + storage.validate_skill_markdown_content(skill_name, target_content) + scan = await scan_skill_content(target_content, executable=False, location=f"{skill_name}/{SKILL_MD_FILE}", app_config=config) + skill_file = storage.get_custom_skill_file(skill_name) current_content = skill_file.read_text(encoding="utf-8") if skill_file.exists() else None history_entry = { "action": "rollback", "author": "human", "thread_id": None, - "file_path": "SKILL.md", + "file_path": SKILL_MD_FILE, "prev_content": current_content, "new_content": target_content, "rollback_from_ts": record.get("ts"), "scanner": {"decision": scan.decision, "reason": scan.reason}, } if scan.decision == "block": - append_history(skill_name, history_entry) + storage.append_history(skill_name, history_entry) raise HTTPException(status_code=400, detail=f"Rollback blocked by security scanner: {scan.reason}") - atomic_write(skill_file, target_content) - append_history(skill_name, history_entry) + storage.write_custom_skill(skill_name, SKILL_MD_FILE, target_content) + 
storage.append_history(skill_name, history_entry) await refresh_skills_system_prompt_cache_async() - return await get_custom_skill(skill_name) + return await get_custom_skill(skill_name, config) except HTTPException: raise except IndexError: @@ -296,9 +284,10 @@ async def rollback_custom_skill(skill_name: str, request: SkillRollbackRequest) summary="Get Skill Details", description="Retrieve detailed information about a specific skill by its name.", ) -async def get_skill(skill_name: str) -> SkillResponse: +async def get_skill(skill_name: str, config: AppConfig = Depends(get_config)) -> SkillResponse: try: - skills = load_skills(enabled_only=False) + skill_name = skill_name.replace("\r\n", "").replace("\n", "") + skills = get_or_new_skill_storage(app_config=config).load_skills(enabled_only=False) skill = next((s for s in skills if s.name == skill_name), None) if skill is None: @@ -318,9 +307,10 @@ async def get_skill(skill_name: str) -> SkillResponse: summary="Update Skill", description="Update a skill's enabled status by modifying the extensions_config.json file.", ) -async def update_skill(skill_name: str, request: SkillUpdateRequest) -> SkillResponse: +async def update_skill(skill_name: str, request: SkillUpdateRequest, config: AppConfig = Depends(get_config)) -> SkillResponse: try: - skills = load_skills(enabled_only=False) + skill_name = skill_name.replace("\r\n", "").replace("\n", "") + skills = get_or_new_skill_storage(app_config=config).load_skills(enabled_only=False) skill = next((s for s in skills if s.name == skill_name), None) if skill is None: @@ -346,7 +336,7 @@ async def update_skill(skill_name: str, request: SkillUpdateRequest) -> SkillRes reload_extensions_config() await refresh_skills_system_prompt_cache_async() - skills = load_skills(enabled_only=False) + skills = get_or_new_skill_storage(app_config=config).load_skills(enabled_only=False) updated_skill = next((s for s in skills if s.name == skill_name), None) if updated_skill is None: diff --git a/backend/app/gateway/routers/suggestions.py b/backend/app/gateway/routers/suggestions.py index ac54e674d..56d99b9d3 100644 --- a/backend/app/gateway/routers/suggestions.py +++ b/backend/app/gateway/routers/suggestions.py @@ -1,10 +1,13 @@ import json import logging -from fastapi import APIRouter +from fastapi import APIRouter, Depends, Request from langchain_core.messages import HumanMessage, SystemMessage from pydantic import BaseModel, Field +from app.gateway.authz import require_permission +from app.gateway.deps import get_config +from deerflow.config.app_config import AppConfig from deerflow.models import create_chat_model logger = logging.getLogger(__name__) @@ -98,12 +101,18 @@ def _format_conversation(messages: list[SuggestionMessage]) -> str: summary="Generate Follow-up Questions", description="Generate short follow-up questions a user might ask next, based on recent conversation context.", ) -async def generate_suggestions(thread_id: str, request: SuggestionsRequest) -> SuggestionsResponse: - if not request.messages: +@require_permission("threads", "read", owner_check=True) +async def generate_suggestions( + thread_id: str, + body: SuggestionsRequest, + request: Request, + config: AppConfig = Depends(get_config), +) -> SuggestionsResponse: + if not body.messages: return SuggestionsResponse(suggestions=[]) - n = request.n - conversation = _format_conversation(request.messages) + n = body.n + conversation = _format_conversation(body.messages) if not conversation: return SuggestionsResponse(suggestions=[]) @@ -120,8 +129,8 
@@ async def generate_suggestions(thread_id: str, request: SuggestionsRequest) -> S user_content = f"Conversation Context:\n{conversation}\n\nGenerate {n} follow-up questions" try: - model = create_chat_model(name=request.model_name, thinking_enabled=False) - response = await model.ainvoke([SystemMessage(content=system_instruction), HumanMessage(content=user_content)]) + model = create_chat_model(name=body.model_name, thinking_enabled=False, app_config=config) + response = await model.ainvoke([SystemMessage(content=system_instruction), HumanMessage(content=user_content)], config={"run_name": "suggest_agent"}) raw = _extract_response_text(response.content) suggestions = _parse_json_string_list(raw) or [] cleaned = [s.replace("\n", " ").strip() for s in suggestions if s.strip()] diff --git a/backend/app/gateway/routers/thread_runs.py b/backend/app/gateway/routers/thread_runs.py index 105fc9ca6..e6847c50f 100644 --- a/backend/app/gateway/routers/thread_runs.py +++ b/backend/app/gateway/routers/thread_runs.py @@ -19,7 +19,8 @@ from fastapi import APIRouter, HTTPException, Query, Request from fastapi.responses import Response, StreamingResponse from pydantic import BaseModel, Field -from app.gateway.deps import get_checkpointer, get_run_manager, get_stream_bridge +from app.gateway.authz import require_permission +from app.gateway.deps import get_checkpointer, get_current_user, get_feedback_repo, get_run_event_store, get_run_manager, get_run_store, get_stream_bridge from app.gateway.services import sse_consumer, start_run from deerflow.runtime import RunRecord, serialize_channel_values @@ -92,6 +93,7 @@ def _record_to_response(record: RunRecord) -> RunResponse: @router.post("/{thread_id}/runs", response_model=RunResponse) +@require_permission("runs", "create", owner_check=True, require_existing=True) async def create_run(thread_id: str, body: RunCreateRequest, request: Request) -> RunResponse: """Create a background run (returns immediately).""" record = await start_run(body, thread_id, request) @@ -99,6 +101,7 @@ async def create_run(thread_id: str, body: RunCreateRequest, request: Request) - @router.post("/{thread_id}/runs/stream") +@require_permission("runs", "create", owner_check=True, require_existing=True) async def stream_run(thread_id: str, body: RunCreateRequest, request: Request) -> StreamingResponse: """Create a run and stream events via SSE. 
@@ -126,6 +129,7 @@ async def stream_run(thread_id: str, body: RunCreateRequest, request: Request) - @router.post("/{thread_id}/runs/wait", response_model=dict) +@require_permission("runs", "create", owner_check=True, require_existing=True) async def wait_run(thread_id: str, body: RunCreateRequest, request: Request) -> dict: """Create a run and block until it completes, returning the final state.""" record = await start_run(body, thread_id, request) @@ -151,6 +155,7 @@ async def wait_run(thread_id: str, body: RunCreateRequest, request: Request) -> @router.get("/{thread_id}/runs", response_model=list[RunResponse]) +@require_permission("runs", "read", owner_check=True) async def list_runs(thread_id: str, request: Request) -> list[RunResponse]: """List all runs for a thread.""" run_mgr = get_run_manager(request) @@ -159,6 +164,7 @@ async def list_runs(thread_id: str, request: Request) -> list[RunResponse]: @router.get("/{thread_id}/runs/{run_id}", response_model=RunResponse) +@require_permission("runs", "read", owner_check=True) async def get_run(thread_id: str, run_id: str, request: Request) -> RunResponse: """Get details of a specific run.""" run_mgr = get_run_manager(request) @@ -169,6 +175,7 @@ async def get_run(thread_id: str, run_id: str, request: Request) -> RunResponse: @router.post("/{thread_id}/runs/{run_id}/cancel") +@require_permission("runs", "cancel", owner_check=True, require_existing=True) async def cancel_run( thread_id: str, run_id: str, @@ -206,6 +213,7 @@ async def cancel_run( @router.get("/{thread_id}/runs/{run_id}/join") +@require_permission("runs", "read", owner_check=True) async def join_run(thread_id: str, run_id: str, request: Request) -> StreamingResponse: """Join an existing run's SSE stream.""" bridge = get_stream_bridge(request) @@ -226,6 +234,7 @@ async def join_run(thread_id: str, run_id: str, request: Request) -> StreamingRe @router.api_route("/{thread_id}/runs/{run_id}/stream", methods=["GET", "POST"], response_model=None) +@require_permission("runs", "read", owner_check=True) async def stream_existing_run( thread_id: str, run_id: str, @@ -265,3 +274,104 @@ async def stream_existing_run( "X-Accel-Buffering": "no", }, ) + + +# --------------------------------------------------------------------------- +# Messages / Events / Token usage endpoints +# --------------------------------------------------------------------------- + + +@router.get("/{thread_id}/messages") +@require_permission("runs", "read", owner_check=True) +async def list_thread_messages( + thread_id: str, + request: Request, + limit: int = Query(default=50, le=200), + before_seq: int | None = Query(default=None), + after_seq: int | None = Query(default=None), +) -> list[dict]: + """Return displayable messages for a thread (across all runs), with feedback attached.""" + event_store = get_run_event_store(request) + messages = await event_store.list_messages(thread_id, limit=limit, before_seq=before_seq, after_seq=after_seq) + + # Attach feedback to the last AI message of each run + feedback_repo = get_feedback_repo(request) + user_id = await get_current_user(request) + feedback_map = await feedback_repo.list_by_thread_grouped(thread_id, user_id=user_id) + + # Find the last ai_message per run_id + last_ai_per_run: dict[str, int] = {} # run_id -> index in messages list + for i, msg in enumerate(messages): + if msg.get("event_type") == "ai_message": + last_ai_per_run[msg["run_id"]] = i + + # Attach feedback field + last_ai_indices = set(last_ai_per_run.values()) + for i, msg in enumerate(messages): + 
if i in last_ai_indices: + run_id = msg["run_id"] + fb = feedback_map.get(run_id) + msg["feedback"] = ( + { + "feedback_id": fb["feedback_id"], + "rating": fb["rating"], + "comment": fb.get("comment"), + } + if fb + else None + ) + else: + msg["feedback"] = None + + return messages + + +@router.get("/{thread_id}/runs/{run_id}/messages") +@require_permission("runs", "read", owner_check=True) +async def list_run_messages( + thread_id: str, + run_id: str, + request: Request, + limit: int = Query(default=50, le=200, ge=1), + before_seq: int | None = Query(default=None), + after_seq: int | None = Query(default=None), +) -> dict: + """Return paginated messages for a specific run. + + Response: { data: [...], has_more: bool } + """ + event_store = get_run_event_store(request) + rows = await event_store.list_messages_by_run( + thread_id, + run_id, + limit=limit + 1, + before_seq=before_seq, + after_seq=after_seq, + ) + has_more = len(rows) > limit + data = rows[:limit] if has_more else rows + return {"data": data, "has_more": has_more} + + +@router.get("/{thread_id}/runs/{run_id}/events") +@require_permission("runs", "read", owner_check=True) +async def list_run_events( + thread_id: str, + run_id: str, + request: Request, + event_types: str | None = Query(default=None), + limit: int = Query(default=500, le=2000), +) -> list[dict]: + """Return the full event stream for a run (debug/audit).""" + event_store = get_run_event_store(request) + types = event_types.split(",") if event_types else None + return await event_store.list_events(thread_id, run_id, event_types=types, limit=limit) + + +@router.get("/{thread_id}/token-usage") +@require_permission("threads", "read", owner_check=True) +async def thread_token_usage(thread_id: str, request: Request) -> dict: + """Thread-level token usage aggregation.""" + run_store = get_run_store(request) + agg = await run_store.aggregate_tokens_by_thread(thread_id) + return {"thread_id": thread_id, **agg} diff --git a/backend/app/gateway/routers/threads.py b/backend/app/gateway/routers/threads.py index 808604980..484582839 100644 --- a/backend/app/gateway/routers/threads.py +++ b/backend/app/gateway/routers/threads.py @@ -18,23 +18,35 @@ import uuid from typing import Any from fastapi import APIRouter, HTTPException, Request -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, field_validator -from app.gateway.deps import get_checkpointer, get_store +from app.gateway.authz import require_permission +from app.gateway.deps import get_checkpointer +from app.gateway.utils import sanitize_log_param from deerflow.config.paths import Paths, get_paths from deerflow.runtime import serialize_channel_values - -# --------------------------------------------------------------------------- -# Store namespace -# --------------------------------------------------------------------------- - -THREADS_NS: tuple[str, ...] = ("threads",) -"""Namespace used by the Store for thread metadata records.""" +from deerflow.runtime.user_context import get_effective_user_id logger = logging.getLogger(__name__) router = APIRouter(prefix="/api/threads", tags=["threads"]) +# Metadata keys that the server controls; clients are not allowed to set +# them. Pydantic ``@field_validator("metadata")`` strips them on every +# inbound model below so a malicious client cannot reflect a forged +# owner identity through the API surface. 
Defense-in-depth — the +# row-level invariant is still ``threads_meta.user_id`` populated from +# the auth contextvar; this list closes the metadata-blob echo gap. +_SERVER_RESERVED_METADATA_KEYS: frozenset[str] = frozenset({"owner_id", "user_id"}) + + +def _strip_reserved_metadata(metadata: dict[str, Any] | None) -> dict[str, Any]: + """Return ``metadata`` with server-controlled keys removed.""" + if not metadata: + return metadata or {} + return {k: v for k, v in metadata.items() if k not in _SERVER_RESERVED_METADATA_KEYS} + + # --------------------------------------------------------------------------- # Response / request models # --------------------------------------------------------------------------- @@ -63,8 +75,11 @@ class ThreadCreateRequest(BaseModel): """Request body for creating a thread.""" thread_id: str | None = Field(default=None, description="Optional thread ID (auto-generated if omitted)") + assistant_id: str | None = Field(default=None, description="Associate thread with an assistant") metadata: dict[str, Any] = Field(default_factory=dict, description="Initial metadata") + _strip_reserved = field_validator("metadata")(classmethod(lambda cls, v: _strip_reserved_metadata(v))) + class ThreadSearchRequest(BaseModel): """Request body for searching threads.""" @@ -93,6 +108,8 @@ class ThreadPatchRequest(BaseModel): metadata: dict[str, Any] = Field(default_factory=dict, description="Metadata to merge") + _strip_reserved = field_validator("metadata")(classmethod(lambda cls, v: _strip_reserved_metadata(v))) + class ThreadStateUpdateRequest(BaseModel): """Request body for updating thread state (human-in-the-loop resume).""" @@ -126,70 +143,25 @@ class ThreadHistoryRequest(BaseModel): # --------------------------------------------------------------------------- -def _delete_thread_data(thread_id: str, paths: Paths | None = None) -> ThreadDeleteResponse: +def _delete_thread_data(thread_id: str, paths: Paths | None = None, *, user_id: str | None = None) -> ThreadDeleteResponse: """Delete local persisted filesystem data for a thread.""" path_manager = paths or get_paths() try: - path_manager.delete_thread_dir(thread_id) + path_manager.delete_thread_dir(thread_id, user_id=user_id) except ValueError as exc: raise HTTPException(status_code=422, detail=str(exc)) from exc except FileNotFoundError: # Not critical — thread data may not exist on disk - logger.debug("No local thread data to delete for %s", thread_id) + logger.debug("No local thread data to delete for %s", sanitize_log_param(thread_id)) return ThreadDeleteResponse(success=True, message=f"No local data for {thread_id}") except Exception as exc: - logger.exception("Failed to delete thread data for %s", thread_id) + logger.exception("Failed to delete thread data for %s", sanitize_log_param(thread_id)) raise HTTPException(status_code=500, detail="Failed to delete local thread data.") from exc - logger.info("Deleted local thread data for %s", thread_id) + logger.info("Deleted local thread data for %s", sanitize_log_param(thread_id)) return ThreadDeleteResponse(success=True, message=f"Deleted local thread data for {thread_id}") -async def _store_get(store, thread_id: str) -> dict | None: - """Fetch a thread record from the Store; returns ``None`` if absent.""" - item = await store.aget(THREADS_NS, thread_id) - return item.value if item is not None else None - - -async def _store_put(store, record: dict) -> None: - """Write a thread record to the Store.""" - await store.aput(THREADS_NS, record["thread_id"], record) - - -async def 
_store_upsert(store, thread_id: str, *, metadata: dict | None = None, values: dict | None = None) -> None: - """Create or refresh a thread record in the Store. - - On creation the record is written with ``status="idle"``. On update only - ``updated_at`` (and optionally ``metadata`` / ``values``) are changed so - that existing fields are preserved. - - ``values`` carries the agent-state snapshot exposed to the frontend - (currently just ``{"title": "..."}``). - """ - now = time.time() - existing = await _store_get(store, thread_id) - if existing is None: - await _store_put( - store, - { - "thread_id": thread_id, - "status": "idle", - "created_at": now, - "updated_at": now, - "metadata": metadata or {}, - "values": values or {}, - }, - ) - else: - val = dict(existing) - val["updated_at"] = now - if metadata: - val.setdefault("metadata", {}).update(metadata) - if values: - val.setdefault("values", {}).update(values) - await _store_put(store, val) - - def _derive_thread_status(checkpoint_tuple) -> str: """Derive thread status from checkpoint metadata.""" if checkpoint_tuple is None: @@ -215,22 +187,18 @@ def _derive_thread_status(checkpoint_tuple) -> str: @router.delete("/{thread_id}", response_model=ThreadDeleteResponse) +@require_permission("threads", "delete", owner_check=True, require_existing=True) async def delete_thread_data(thread_id: str, request: Request) -> ThreadDeleteResponse: """Delete local persisted filesystem data for a thread. Cleans DeerFlow-managed thread directories, removes checkpoint data, - and removes the thread record from the Store. + and removes the thread_meta row from the configured ThreadMetaStore + (sqlite or memory). """ - # Clean local filesystem - response = _delete_thread_data(thread_id) + from app.gateway.deps import get_thread_store - # Remove from Store (best-effort) - store = get_store(request) - if store is not None: - try: - await store.adelete(THREADS_NS, thread_id) - except Exception: - logger.debug("Could not delete store record for thread %s (not critical)", thread_id) + # Clean local filesystem + response = _delete_thread_data(thread_id, user_id=get_effective_user_id()) # Remove checkpoints (best-effort) checkpointer = getattr(request.app.state, "checkpointer", None) @@ -239,7 +207,15 @@ async def delete_thread_data(thread_id: str, request: Request) -> ThreadDeleteRe if hasattr(checkpointer, "adelete_thread"): await checkpointer.adelete_thread(thread_id) except Exception: - logger.debug("Could not delete checkpoints for thread %s (not critical)", thread_id) + logger.debug("Could not delete checkpoints for thread %s (not critical)", sanitize_log_param(thread_id)) + + # Remove thread_meta row (best-effort) — required for sqlite backend + # so the deleted thread no longer appears in /threads/search. + try: + thread_store = get_thread_store(request) + await thread_store.delete(thread_id) + except Exception: + logger.debug("Could not delete thread_meta for %s (not critical)", sanitize_log_param(thread_id)) return response @@ -248,43 +224,40 @@ async def delete_thread_data(thread_id: str, request: Request) -> ThreadDeleteRe async def create_thread(body: ThreadCreateRequest, request: Request) -> ThreadResponse: """Create a new thread. - The thread record is written to the Store (for fast listing) and an - empty checkpoint is written to the checkpointer (for state reads). + Writes a thread_meta record (so the thread appears in /threads/search) + and an empty checkpoint (so state endpoints work immediately). 
Idempotent: returns the existing record when ``thread_id`` already exists. """ - store = get_store(request) + from app.gateway.deps import get_thread_store + checkpointer = get_checkpointer(request) + thread_store = get_thread_store(request) thread_id = body.thread_id or str(uuid.uuid4()) now = time.time() + # ``body.metadata`` is already stripped of server-reserved keys by + # ``ThreadCreateRequest._strip_reserved`` — see the model definition. - # Idempotency: return existing record from Store when already present - if store is not None: - existing_record = await _store_get(store, thread_id) - if existing_record is not None: - return ThreadResponse( - thread_id=thread_id, - status=existing_record.get("status", "idle"), - created_at=str(existing_record.get("created_at", "")), - updated_at=str(existing_record.get("updated_at", "")), - metadata=existing_record.get("metadata", {}), - ) + # Idempotency: return existing record when already present + existing_record = await thread_store.get(thread_id) + if existing_record is not None: + return ThreadResponse( + thread_id=thread_id, + status=existing_record.get("status", "idle"), + created_at=str(existing_record.get("created_at", "")), + updated_at=str(existing_record.get("updated_at", "")), + metadata=existing_record.get("metadata", {}), + ) - # Write thread record to Store - if store is not None: - try: - await _store_put( - store, - { - "thread_id": thread_id, - "status": "idle", - "created_at": now, - "updated_at": now, - "metadata": body.metadata, - }, - ) - except Exception: - logger.exception("Failed to write thread %s to store", thread_id) - raise HTTPException(status_code=500, detail="Failed to create thread") + # Write thread_meta so the thread appears in /threads/search immediately + try: + await thread_store.create( + thread_id, + assistant_id=getattr(body, "assistant_id", None), + metadata=body.metadata, + ) + except Exception: + logger.exception("Failed to write thread_meta for %s", sanitize_log_param(thread_id)) + raise HTTPException(status_code=500, detail="Failed to create thread") # Write an empty checkpoint so state endpoints work immediately config = {"configurable": {"thread_id": thread_id, "checkpoint_ns": ""}} @@ -301,10 +274,10 @@ async def create_thread(body: ThreadCreateRequest, request: Request) -> ThreadRe } await checkpointer.aput(config, empty_checkpoint(), ckpt_metadata, {}) except Exception: - logger.exception("Failed to create checkpoint for thread %s", thread_id) + logger.exception("Failed to create checkpoint for thread %s", sanitize_log_param(thread_id)) raise HTTPException(status_code=500, detail="Failed to create thread") - logger.info("Thread created: %s", thread_id) + logger.info("Thread created: %s", sanitize_log_param(thread_id)) return ThreadResponse( thread_id=thread_id, status="idle", @@ -318,166 +291,91 @@ async def create_thread(body: ThreadCreateRequest, request: Request) -> ThreadRe async def search_threads(body: ThreadSearchRequest, request: Request) -> list[ThreadResponse]: """Search and list threads. - Two-phase approach: - - **Phase 1 — Store (fast path, O(threads))**: returns threads that were - created or run through this Gateway. Store records are tiny metadata - dicts so fetching all of them at once is cheap. - - **Phase 2 — Checkpointer supplement (lazy migration)**: threads that - were created directly by LangGraph Server (and therefore absent from the - Store) are discovered here by iterating the shared checkpointer. 
Any - newly found thread is immediately written to the Store so that the next - search skips Phase 2 for that thread — the Store converges to a full - index over time without a one-shot migration job. + Delegates to the configured ThreadMetaStore implementation + (SQL-backed for sqlite/postgres, Store-backed for memory mode). """ - store = get_store(request) - checkpointer = get_checkpointer(request) + from app.gateway.deps import get_thread_store - # ----------------------------------------------------------------------- - # Phase 1: Store - # ----------------------------------------------------------------------- - merged: dict[str, ThreadResponse] = {} - - if store is not None: - try: - items = await store.asearch(THREADS_NS, limit=10_000) - except Exception: - logger.warning("Store search failed — falling back to checkpointer only", exc_info=True) - items = [] - - for item in items: - val = item.value - merged[val["thread_id"]] = ThreadResponse( - thread_id=val["thread_id"], - status=val.get("status", "idle"), - created_at=str(val.get("created_at", "")), - updated_at=str(val.get("updated_at", "")), - metadata=val.get("metadata", {}), - values=val.get("values", {}), - ) - - # ----------------------------------------------------------------------- - # Phase 2: Checkpointer supplement - # Discovers threads not yet in the Store (e.g. created by LangGraph - # Server) and lazily migrates them so future searches skip this phase. - # ----------------------------------------------------------------------- - try: - async for checkpoint_tuple in checkpointer.alist(None): - cfg = getattr(checkpoint_tuple, "config", {}) - thread_id = cfg.get("configurable", {}).get("thread_id") - if not thread_id or thread_id in merged: - continue - - # Skip sub-graph checkpoints (checkpoint_ns is non-empty for those) - if cfg.get("configurable", {}).get("checkpoint_ns", ""): - continue - - ckpt_meta = getattr(checkpoint_tuple, "metadata", {}) or {} - # Strip LangGraph internal keys from the user-visible metadata dict - user_meta = {k: v for k, v in ckpt_meta.items() if k not in ("created_at", "updated_at", "step", "source", "writes", "parents")} - - # Extract state values (title) from the checkpoint's channel_values - checkpoint_data = getattr(checkpoint_tuple, "checkpoint", {}) or {} - channel_values = checkpoint_data.get("channel_values", {}) - ckpt_values = {} - if title := channel_values.get("title"): - ckpt_values["title"] = title - - thread_resp = ThreadResponse( - thread_id=thread_id, - status=_derive_thread_status(checkpoint_tuple), - created_at=str(ckpt_meta.get("created_at", "")), - updated_at=str(ckpt_meta.get("updated_at", ckpt_meta.get("created_at", ""))), - metadata=user_meta, - values=ckpt_values, - ) - merged[thread_id] = thread_resp - - # Lazy migration — write to Store so the next search finds it there - if store is not None: - try: - await _store_upsert(store, thread_id, metadata=user_meta, values=ckpt_values or None) - except Exception: - logger.debug("Failed to migrate thread %s to store (non-fatal)", thread_id) - except Exception: - logger.exception("Checkpointer scan failed during thread search") - # Don't raise — return whatever was collected from Store + partial scan - - # ----------------------------------------------------------------------- - # Phase 3: Filter → sort → paginate - # ----------------------------------------------------------------------- - results = list(merged.values()) - - if body.metadata: - results = [r for r in results if all(r.metadata.get(k) == v for k, v in 
body.metadata.items())] - - if body.status: - results = [r for r in results if r.status == body.status] - - results.sort(key=lambda r: r.updated_at, reverse=True) - return results[body.offset : body.offset + body.limit] + repo = get_thread_store(request) + rows = await repo.search( + metadata=body.metadata or None, + status=body.status, + limit=body.limit, + offset=body.offset, + ) + return [ + ThreadResponse( + thread_id=r["thread_id"], + status=r.get("status", "idle"), + created_at=r.get("created_at", ""), + updated_at=r.get("updated_at", ""), + metadata=r.get("metadata", {}), + values={"title": r["display_name"]} if r.get("display_name") else {}, + interrupts={}, + ) + for r in rows + ] @router.patch("/{thread_id}", response_model=ThreadResponse) +@require_permission("threads", "write", owner_check=True, require_existing=True) async def patch_thread(thread_id: str, body: ThreadPatchRequest, request: Request) -> ThreadResponse: """Merge metadata into a thread record.""" - store = get_store(request) - if store is None: - raise HTTPException(status_code=503, detail="Store not available") + from app.gateway.deps import get_thread_store - record = await _store_get(store, thread_id) + thread_store = get_thread_store(request) + record = await thread_store.get(thread_id) if record is None: raise HTTPException(status_code=404, detail=f"Thread {thread_id} not found") - now = time.time() - updated = dict(record) - updated.setdefault("metadata", {}).update(body.metadata) - updated["updated_at"] = now - + # ``body.metadata`` already stripped by ``ThreadPatchRequest._strip_reserved``. try: - await _store_put(store, updated) + await thread_store.update_metadata(thread_id, body.metadata) except Exception: - logger.exception("Failed to patch thread %s", thread_id) + logger.exception("Failed to patch thread %s", sanitize_log_param(thread_id)) raise HTTPException(status_code=500, detail="Failed to update thread") + # Re-read to get the merged metadata + refreshed updated_at + record = await thread_store.get(thread_id) or record return ThreadResponse( thread_id=thread_id, - status=updated.get("status", "idle"), - created_at=str(updated.get("created_at", "")), - updated_at=str(now), - metadata=updated.get("metadata", {}), + status=record.get("status", "idle"), + created_at=str(record.get("created_at", "")), + updated_at=str(record.get("updated_at", "")), + metadata=record.get("metadata", {}), ) @router.get("/{thread_id}", response_model=ThreadResponse) +@require_permission("threads", "read", owner_check=True) async def get_thread(thread_id: str, request: Request) -> ThreadResponse: """Get thread info. - Reads metadata from the Store and derives the accurate execution - status from the checkpointer. Falls back to the checkpointer alone - for threads that pre-date Store adoption (backward compat). + Reads metadata from the ThreadMetaStore and derives the accurate + execution status from the checkpointer. Falls back to the checkpointer + alone for threads that pre-date ThreadMetaStore adoption (backward compat). 
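These handlers are now also wrapped in `@require_permission(...)`, whose implementation lives in `app.gateway.authz` and is not part of this hunk. The sketch below is a hypothetical shape consistent with its call sites: every decorated handler declares a `request: Request` parameter, presumably so the decorator can reach the authenticated user. The internals are assumptions:

```python
# Hypothetical sketch of require_permission (real code: app.gateway.authz,
# not shown in this diff). request.state.user is an assumed attribute.
import functools

from fastapi import HTTPException, Request


def require_permission(resource: str, action: str, *, owner_check: bool = False, require_existing: bool = False):
    def decorator(handler):
        @functools.wraps(handler)
        async def wrapper(*args, **kwargs):
            request: Request = kwargs["request"]  # decorated handlers must accept `request`
            user = getattr(request.state, "user", None)
            if user is None:
                raise HTTPException(status_code=401, detail="Not authenticated")
            thread_id = kwargs.get("thread_id")
            if owner_check and thread_id is not None:
                # Ownership (and, with require_existing, existence) checks on
                # thread_id would go here; answering 404 rather than 403 avoids
                # leaking whether someone else's thread exists.
                ...
            return await handler(*args, **kwargs)

        return wrapper

    return decorator
```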
""" - store = get_store(request) + from app.gateway.deps import get_thread_store + + thread_store = get_thread_store(request) checkpointer = get_checkpointer(request) - record: dict | None = None - if store is not None: - record = await _store_get(store, thread_id) + record: dict | None = await thread_store.get(thread_id) # Derive accurate status from the checkpointer config = {"configurable": {"thread_id": thread_id, "checkpoint_ns": ""}} try: checkpoint_tuple = await checkpointer.aget_tuple(config) except Exception: - logger.exception("Failed to get checkpoint for thread %s", thread_id) + logger.exception("Failed to get checkpoint for thread %s", sanitize_log_param(thread_id)) raise HTTPException(status_code=500, detail="Failed to get thread") if record is None and checkpoint_tuple is None: raise HTTPException(status_code=404, detail=f"Thread {thread_id} not found") - # If the thread exists in the checkpointer but not the store (e.g. legacy - # data), synthesize a minimal store record from the checkpoint metadata. + # If the thread exists in the checkpointer but not in thread_meta (e.g. + # legacy data created before thread_meta adoption), synthesize a minimal + # record from the checkpoint metadata. if record is None and checkpoint_tuple is not None: ckpt_meta = getattr(checkpoint_tuple, "metadata", {}) or {} record = { @@ -505,7 +403,9 @@ async def get_thread(thread_id: str, request: Request) -> ThreadResponse: ) +# --------------------------------------------------------------------------- @router.get("/{thread_id}/state", response_model=ThreadStateResponse) +@require_permission("threads", "read", owner_check=True) async def get_thread_state(thread_id: str, request: Request) -> ThreadStateResponse: """Get the latest state snapshot for a thread. @@ -518,7 +418,7 @@ async def get_thread_state(thread_id: str, request: Request) -> ThreadStateRespo try: checkpoint_tuple = await checkpointer.aget_tuple(config) except Exception: - logger.exception("Failed to get state for thread %s", thread_id) + logger.exception("Failed to get state for thread %s", sanitize_log_param(thread_id)) raise HTTPException(status_code=500, detail="Failed to get thread state") if checkpoint_tuple is None: @@ -542,8 +442,10 @@ async def get_thread_state(thread_id: str, request: Request) -> ThreadStateRespo next_tasks = [t.name for t in tasks_raw if hasattr(t, "name")] tasks = [{"id": getattr(t, "id", ""), "name": getattr(t, "name", "")} for t in tasks_raw] + values = serialize_channel_values(channel_values) + return ThreadStateResponse( - values=serialize_channel_values(channel_values), + values=values, next=next_tasks, metadata=metadata, checkpoint={"id": checkpoint_id, "ts": str(metadata.get("created_at", ""))}, @@ -555,15 +457,19 @@ async def get_thread_state(thread_id: str, request: Request) -> ThreadStateRespo @router.post("/{thread_id}/state", response_model=ThreadStateResponse) +@require_permission("threads", "write", owner_check=True, require_existing=True) async def update_thread_state(thread_id: str, body: ThreadStateUpdateRequest, request: Request) -> ThreadStateResponse: """Update thread state (e.g. for human-in-the-loop resume or title rename). Writes a new checkpoint that merges *body.values* into the latest - channel values, then syncs any updated ``title`` field back to the Store - so that ``/threads/search`` reflects the change immediately. 
+ channel values, then syncs any updated ``title`` field through the + ThreadMetaStore abstraction so that ``/threads/search`` reflects the + change immediately in both sqlite and memory backends. """ + from app.gateway.deps import get_thread_store + checkpointer = get_checkpointer(request) - store = get_store(request) + thread_store = get_thread_store(request) # checkpoint_ns must be present in the config for aput — default to "" # (the root graph namespace). checkpoint_id is optional; omitting it @@ -580,7 +486,7 @@ async def update_thread_state(thread_id: str, body: ThreadStateUpdateRequest, re try: checkpoint_tuple = await checkpointer.aget_tuple(read_config) except Exception: - logger.exception("Failed to get state for thread %s", thread_id) + logger.exception("Failed to get state for thread %s", sanitize_log_param(thread_id)) raise HTTPException(status_code=500, detail="Failed to get thread state") if checkpoint_tuple is None: @@ -614,19 +520,22 @@ async def update_thread_state(thread_id: str, body: ThreadStateUpdateRequest, re try: new_config = await checkpointer.aput(write_config, checkpoint, metadata, {}) except Exception: - logger.exception("Failed to update state for thread %s", thread_id) + logger.exception("Failed to update state for thread %s", sanitize_log_param(thread_id)) raise HTTPException(status_code=500, detail="Failed to update thread state") new_checkpoint_id: str | None = None if isinstance(new_config, dict): new_checkpoint_id = new_config.get("configurable", {}).get("checkpoint_id") - # Sync title changes to the Store so /threads/search reflects them immediately. - if store is not None and body.values and "title" in body.values: - try: - await _store_upsert(store, thread_id, values={"title": body.values["title"]}) - except Exception: - logger.debug("Failed to sync title to store for thread %s (non-fatal)", thread_id) + # Sync title changes through the ThreadMetaStore abstraction so /threads/search + # reflects them immediately in both sqlite and memory backends. + if body.values and "title" in body.values: + new_title = body.values["title"] + if new_title: # Skip empty strings and None + try: + await thread_store.update_display_name(thread_id, new_title) + except Exception: + logger.debug("Failed to sync title to thread_meta for %s (non-fatal)", sanitize_log_param(thread_id)) return ThreadStateResponse( values=serialize_channel_values(channel_values), @@ -638,8 +547,16 @@ async def update_thread_state(thread_id: str, body: ThreadStateUpdateRequest, re @router.post("/{thread_id}/history", response_model=list[HistoryEntry]) +@require_permission("threads", "read", owner_check=True) async def get_thread_history(thread_id: str, body: ThreadHistoryRequest, request: Request) -> list[HistoryEntry]: - """Get checkpoint history for a thread.""" + """Get checkpoint history for a thread. + + Messages are read from the checkpointer's channel values (the + authoritative source) and serialized via + :func:`~deerflow.runtime.serialization.serialize_channel_values`. + Only the latest (first) checkpoint carries the ``messages`` key to + avoid duplicating them across every entry. 
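Because only the first (newest) history entry carries `messages`, clients must read the transcript from `entries[0]` rather than from every entry. A consumption sketch, assuming an `httpx` client and the `/threads` mount prefix:

```python
# Sketch of consuming the history endpoint under its messages-on-latest-only
# contract (httpx and the /threads prefix are assumptions).
from typing import Any

import httpx


async def load_transcript(client: httpx.AsyncClient, thread_id: str) -> list[dict[str, Any]]:
    resp = await client.post(f"/threads/{thread_id}/history", json={"limit": 50})
    resp.raise_for_status()
    entries = resp.json()
    # Older entries still expose title/thread_data in "values", but omit
    # "messages" so the transcript is not duplicated once per checkpoint.
    return entries[0]["values"].get("messages", []) if entries else []
```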
+ """ checkpointer = get_checkpointer(request) config: dict[str, Any] = {"configurable": {"thread_id": thread_id}} @@ -647,6 +564,7 @@ async def get_thread_history(thread_id: str, body: ThreadHistoryRequest, request config["configurable"]["checkpoint_id"] = body.before entries: list[HistoryEntry] = [] + is_latest_checkpoint = True try: async for checkpoint_tuple in checkpointer.alist(config, limit=body.limit): ckpt_config = getattr(checkpoint_tuple, "config", {}) @@ -661,22 +579,42 @@ async def get_thread_history(thread_id: str, body: ThreadHistoryRequest, request channel_values = checkpoint.get("channel_values", {}) + # Build values from checkpoint channel_values + values: dict[str, Any] = {} + if title := channel_values.get("title"): + values["title"] = title + if thread_data := channel_values.get("thread_data"): + values["thread_data"] = thread_data + + # Attach messages only to the latest checkpoint entry. + if is_latest_checkpoint: + messages = channel_values.get("messages") + if messages: + values["messages"] = serialize_channel_values({"messages": messages}).get("messages", []) + is_latest_checkpoint = False + # Derive next tasks tasks_raw = getattr(checkpoint_tuple, "tasks", []) or [] next_tasks = [t.name for t in tasks_raw if hasattr(t, "name")] + # Strip LangGraph internal keys from metadata + user_meta = {k: v for k, v in metadata.items() if k not in ("created_at", "updated_at", "step", "source", "writes", "parents")} + # Keep step for ordering context + if "step" in metadata: + user_meta["step"] = metadata["step"] + entries.append( HistoryEntry( checkpoint_id=checkpoint_id, parent_checkpoint_id=parent_id, - metadata=metadata, - values=serialize_channel_values(channel_values), + metadata=user_meta, + values=values, created_at=str(metadata.get("created_at", "")), next=next_tasks, ) ) except Exception: - logger.exception("Failed to get history for thread %s", thread_id) + logger.exception("Failed to get history for thread %s", sanitize_log_param(thread_id)) raise HTTPException(status_code=500, detail="Failed to get thread history") return entries diff --git a/backend/app/gateway/routers/uploads.py b/backend/app/gateway/routers/uploads.py index 6f8be52a1..0ecc2266a 100644 --- a/backend/app/gateway/routers/uploads.py +++ b/backend/app/gateway/routers/uploads.py @@ -4,11 +4,14 @@ import logging import os import stat -from fastapi import APIRouter, File, HTTPException, UploadFile +from fastapi import APIRouter, Depends, File, HTTPException, Request, UploadFile from pydantic import BaseModel -from deerflow.config.app_config import get_app_config +from app.gateway.authz import require_permission +from app.gateway.deps import get_config +from deerflow.config.app_config import AppConfig from deerflow.config.paths import get_paths +from deerflow.runtime.user_context import get_effective_user_id from deerflow.sandbox.sandbox_provider import SandboxProvider, get_sandbox_provider from deerflow.uploads.manager import ( PathTraversalError, @@ -58,23 +61,22 @@ def _uses_thread_data_mounts(sandbox_provider: SandboxProvider) -> bool: return bool(getattr(sandbox_provider, "uses_thread_data_mounts", False)) -def _get_uploads_config_value(key: str, default: object) -> object: +def _get_uploads_config_value(app_config: AppConfig, key: str, default: object) -> object: """Read a value from the uploads config, supporting dict and attribute access.""" - cfg = get_app_config() - uploads_cfg = getattr(cfg, "uploads", None) + uploads_cfg = getattr(app_config, "uploads", None) if isinstance(uploads_cfg, 
dict): return uploads_cfg.get(key, default) return getattr(uploads_cfg, key, default) -def _auto_convert_documents_enabled() -> bool: +def _auto_convert_documents_enabled(app_config: AppConfig) -> bool: """Return whether automatic host-side document conversion is enabled. The secure default is disabled unless an operator explicitly opts in via uploads.auto_convert_documents in config.yaml. """ try: - raw = _get_uploads_config_value("auto_convert_documents", False) + raw = _get_uploads_config_value(app_config, "auto_convert_documents", False) if isinstance(raw, str): return raw.strip().lower() in {"1", "true", "yes", "on"} return bool(raw) @@ -83,9 +85,12 @@ def _auto_convert_documents_enabled() -> bool: @router.post("", response_model=UploadResponse) +@require_permission("threads", "write", owner_check=True, require_existing=False) async def upload_files( thread_id: str, + request: Request, files: list[UploadFile] = File(...), + config: AppConfig = Depends(get_config), ) -> UploadResponse: """Upload multiple files to a thread's uploads directory.""" if not files: @@ -95,7 +100,7 @@ async def upload_files( uploads_dir = ensure_uploads_dir(thread_id) except ValueError as e: raise HTTPException(status_code=400, detail=str(e)) - sandbox_uploads = get_paths().sandbox_uploads_dir(thread_id) + sandbox_uploads = get_paths().sandbox_uploads_dir(thread_id, user_id=get_effective_user_id()) uploaded_files = [] sandbox_provider = get_sandbox_provider() @@ -104,7 +109,7 @@ async def upload_files( if sync_to_sandbox: sandbox_id = sandbox_provider.acquire(thread_id) sandbox = sandbox_provider.get(sandbox_id) - auto_convert_documents = _auto_convert_documents_enabled() + auto_convert_documents = _auto_convert_documents_enabled(config) for file in files: if not file.filename: @@ -166,7 +171,8 @@ async def upload_files( @router.get("/list", response_model=dict) -async def list_uploaded_files(thread_id: str) -> dict: +@require_permission("threads", "read", owner_check=True) +async def list_uploaded_files(thread_id: str, request: Request) -> dict: """List all files in a thread's uploads directory.""" try: uploads_dir = get_uploads_dir(thread_id) @@ -176,7 +182,7 @@ async def list_uploaded_files(thread_id: str) -> dict: enrich_file_listing(result, thread_id) # Gateway additionally includes the sandbox-relative path. 
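To make the opt-in parsing above concrete, a throwaway illustration: `SimpleNamespace` stands in for `AppConfig`, since only its `uploads` attribute is read.

```python
# Illustrates _auto_convert_documents_enabled's truthy-string handling using
# a SimpleNamespace stand-in for AppConfig (only `.uploads` is accessed).
from types import SimpleNamespace

assert _auto_convert_documents_enabled(SimpleNamespace(uploads={"auto_convert_documents": "Yes"})) is True
assert _auto_convert_documents_enabled(SimpleNamespace(uploads={"auto_convert_documents": "off"})) is False
assert _auto_convert_documents_enabled(SimpleNamespace(uploads=None)) is False  # secure default stays off
```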
- sandbox_uploads = get_paths().sandbox_uploads_dir(thread_id) + sandbox_uploads = get_paths().sandbox_uploads_dir(thread_id, user_id=get_effective_user_id()) for f in result["files"]: f["path"] = str(sandbox_uploads / f["filename"]) @@ -184,7 +190,8 @@ async def list_uploaded_files(thread_id: str) -> dict: @router.delete("/{filename}") -async def delete_uploaded_file(thread_id: str, filename: str) -> dict: +@require_permission("threads", "delete", owner_check=True, require_existing=True) +async def delete_uploaded_file(thread_id: str, filename: str, request: Request) -> dict: """Delete a file from a thread's uploads directory.""" try: uploads_dir = get_uploads_dir(thread_id) diff --git a/backend/app/gateway/services.py b/backend/app/gateway/services.py index 7dc22a9ef..634b8b9d1 100644 --- a/backend/app/gateway/services.py +++ b/backend/app/gateway/services.py @@ -11,13 +11,14 @@ import asyncio import json import logging import re -import time +from collections.abc import Mapping from typing import Any from fastapi import HTTPException, Request from langchain_core.messages import HumanMessage -from app.gateway.deps import get_checkpointer, get_run_manager, get_store, get_stream_bridge +from app.gateway.deps import get_run_context, get_run_manager, get_stream_bridge +from app.gateway.utils import sanitize_log_param from deerflow.runtime import ( END_SENTINEL, HEARTBEAT_SENTINEL, @@ -97,13 +98,52 @@ def normalize_input(raw_input: dict[str, Any] | None) -> dict[str, Any]: _DEFAULT_ASSISTANT_ID = "lead_agent" +# Whitelist of run-context keys that the langgraph-compat layer forwards from +# ``body.context`` into the run config. ``config["context"]`` exists in +# LangGraph >=0.6, but these values must be written to both ``configurable`` +# (for legacy ``_get_runtime_config`` consumers) and ``context`` because +# LangGraph >=1.1.9 no longer makes ``ToolRuntime.context`` fall back to +# ``configurable`` for consumers like ``setup_agent``. +_CONTEXT_CONFIGURABLE_KEYS: frozenset[str] = frozenset( + { + "model_name", + "mode", + "thinking_enabled", + "reasoning_effort", + "is_plan_mode", + "subagent_enabled", + "max_concurrent_subagents", + "agent_name", + "is_bootstrap", + } +) + + +def merge_run_context_overrides(config: dict[str, Any], context: Mapping[str, Any] | None) -> None: + """Merge whitelisted keys from ``body.context`` into both ``config['configurable']`` + and ``config['context']`` so they are visible to legacy configurable readers and + to LangGraph ``ToolRuntime.context`` consumers (e.g. the ``setup_agent`` tool — + see issue #2677).""" + if not context: + return + configurable = config.setdefault("configurable", {}) + runtime_context = config.setdefault("context", {}) + for key in _CONTEXT_CONFIGURABLE_KEYS: + if key in context: + if isinstance(configurable, dict): + configurable.setdefault(key, context[key]) + if isinstance(runtime_context, dict): + runtime_context.setdefault(key, context[key]) + + def resolve_agent_factory(assistant_id: str | None): """Resolve the agent factory callable from config. Custom agents are implemented as ``lead_agent`` + an ``agent_name`` - injected into ``configurable`` — see :func:`build_run_config`. All - ``assistant_id`` values therefore map to the same factory; the routing - happens inside ``make_lead_agent`` when it reads ``cfg["agent_name"]``. + injected into ``configurable`` or ``context`` — see + :func:`build_run_config`. 
All ``assistant_id`` values therefore map to the + same factory; the routing happens inside ``make_lead_agent`` when it reads + ``cfg["agent_name"]``. """ from deerflow.agents.lead_agent.agent import make_lead_agent @@ -120,10 +160,12 @@ def build_run_config( """Build a RunnableConfig dict for the agent. When *assistant_id* refers to a custom agent (anything other than - ``"lead_agent"`` / ``None``), the name is forwarded as - ``configurable["agent_name"]``. ``make_lead_agent`` reads this key to - load the matching ``agents//SOUL.md`` and per-agent config — - without it the agent silently runs as the default lead agent. + ``"lead_agent"`` / ``None``), the name is forwarded as ``agent_name`` in + whichever runtime options container is active: ``context`` for + LangGraph >= 0.6.0 requests, otherwise ``configurable``. + ``make_lead_agent`` reads this key to load the matching + ``agents//SOUL.md`` and per-agent config — without it the agent + silently runs as the default lead agent. This mirrors the channel manager's ``_resolve_run_params`` logic so that the LangGraph Platform-compatible HTTP API and the IM channel path behave @@ -142,7 +184,14 @@ def build_run_config( thread_id, list(request_config.get("configurable", {}).keys()), ) - config["context"] = request_config["context"] + context_value = request_config["context"] + if context_value is None: + context = {} + elif isinstance(context_value, Mapping): + context = dict(context_value) + else: + raise ValueError("request config 'context' must be a mapping or null.") + config["context"] = context else: configurable = {"thread_id": thread_id} configurable.update(request_config.get("configurable", {})) @@ -154,13 +203,19 @@ def build_run_config( config["configurable"] = {"thread_id": thread_id} # Inject custom agent name when the caller specified a non-default assistant. - # Honour an explicit configurable["agent_name"] in the request if already set. - if assistant_id and assistant_id != _DEFAULT_ASSISTANT_ID and "configurable" in config: - if "agent_name" not in config["configurable"]: - normalized = assistant_id.strip().lower().replace("_", "-") - if not normalized or not re.fullmatch(r"[a-z0-9-]+", normalized): - raise ValueError(f"Invalid assistant_id {assistant_id!r}: must contain only letters, digits, and hyphens after normalization.") - config["configurable"]["agent_name"] = normalized + # Honour an explicit agent_name in the active runtime options container. + if assistant_id and assistant_id != _DEFAULT_ASSISTANT_ID: + normalized = assistant_id.strip().lower().replace("_", "-") + if not normalized or not re.fullmatch(r"[a-z0-9-]+", normalized): + raise ValueError(f"Invalid assistant_id {assistant_id!r}: must contain only letters, digits, and hyphens after normalization.") + if "configurable" in config: + target = config["configurable"] + elif "context" in config: + target = config["context"] + else: + target = config.setdefault("configurable", {}) + if target is not None and "agent_name" not in target: + target["agent_name"] = normalized if metadata: config.setdefault("metadata", {}).update(metadata) return config @@ -171,71 +226,6 @@ def build_run_config( # --------------------------------------------------------------------------- -async def _upsert_thread_in_store(store, thread_id: str, metadata: dict | None) -> None: - """Create or refresh the thread record in the Store. 
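A scratch driver for the two helpers above, mirroring the call shape used later in `start_run` (`build_run_config(thread_id, body.config, body.metadata, assistant_id=...)`). The argument order is taken from that call; the inputs are throwaway data:

```python
# Exercises build_run_config and merge_run_context_overrides from this diff
# with scratch inputs; not part of the change itself.
from app.gateway.services import build_run_config, merge_run_context_overrides

config = build_run_config("thread-1", {"configurable": {}}, None, assistant_id="Deep_Researcher")
assert config["configurable"]["thread_id"] == "thread-1"
assert config["configurable"]["agent_name"] == "deep-researcher"  # lowercased, "_" -> "-"

# body.context overrides are mirrored into BOTH containers, and setdefault
# means they never clobber a value that is already present.
merge_run_context_overrides(config, {"model_name": "gpt-4o", "agent_name": "other"})
assert config["configurable"]["model_name"] == "gpt-4o"
assert config["configurable"]["agent_name"] == "deep-researcher"  # pre-set value wins
assert config["context"]["model_name"] == "gpt-4o"
```

Keeping the merge `setdefault`-only means values already present in the request config always win over `body.context` hints.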
- - Called from :func:`start_run` so that threads created via the stateless - ``/runs/stream`` endpoint (which never calls ``POST /threads``) still - appear in ``/threads/search`` results. - """ - # Deferred import to avoid circular import with the threads router module. - from app.gateway.routers.threads import _store_upsert - - try: - await _store_upsert(store, thread_id, metadata=metadata) - except Exception: - logger.warning("Failed to upsert thread %s in store (non-fatal)", thread_id) - - -async def _sync_thread_title_after_run( - run_task: asyncio.Task, - thread_id: str, - checkpointer: Any, - store: Any, -) -> None: - """Wait for *run_task* to finish, then persist the generated title to the Store. - - TitleMiddleware writes the generated title to the LangGraph agent state - (checkpointer) but the Gateway's Store record is not updated automatically. - This coroutine closes that gap by reading the final checkpoint after the - run completes and syncing ``values.title`` into the Store record so that - subsequent ``/threads/search`` responses include the correct title. - - Runs as a fire-and-forget :func:`asyncio.create_task`; failures are - logged at DEBUG level and never propagate. - """ - # Wait for the background run task to complete (any outcome). - # asyncio.wait does not propagate task exceptions — it just returns - # when the task is done, cancelled, or failed. - await asyncio.wait({run_task}) - - # Deferred import to avoid circular import with the threads router module. - from app.gateway.routers.threads import _store_get, _store_put - - try: - ckpt_config = {"configurable": {"thread_id": thread_id, "checkpoint_ns": ""}} - ckpt_tuple = await checkpointer.aget_tuple(ckpt_config) - if ckpt_tuple is None: - return - - channel_values = ckpt_tuple.checkpoint.get("channel_values", {}) - title = channel_values.get("title") - if not title: - return - - existing = await _store_get(store, thread_id) - if existing is None: - return - - updated = dict(existing) - updated.setdefault("values", {})["title"] = title - updated["updated_at"] = time.time() - await _store_put(store, updated) - logger.debug("Synced title %r for thread %s", title, thread_id) - except Exception: - logger.debug("Failed to sync title for thread %s (non-fatal)", thread_id, exc_info=True) - - async def start_run( body: Any, thread_id: str, @@ -255,8 +245,7 @@ async def start_run( """ bridge = get_stream_bridge(request) run_mgr = get_run_manager(request) - checkpointer = get_checkpointer(request) - store = get_store(request) + run_ctx = get_run_context(request) disconnect = DisconnectMode.cancel if body.on_disconnect == "cancel" else DisconnectMode.continue_ @@ -274,37 +263,31 @@ async def start_run( except UnsupportedStrategyError as exc: raise HTTPException(status_code=501, detail=str(exc)) from exc - # Ensure the thread is visible in /threads/search, even for threads that - # were never explicitly created via POST /threads (e.g. stateless runs). - store = get_store(request) - if store is not None: - await _upsert_thread_in_store(store, thread_id, body.metadata) + # Upsert thread metadata so the thread appears in /threads/search, + # even for threads that were never explicitly created via POST /threads + # (e.g. stateless runs). 
+ try: + existing = await run_ctx.thread_store.get(thread_id) + if existing is None: + await run_ctx.thread_store.create( + thread_id, + assistant_id=body.assistant_id, + metadata=body.metadata, + ) + else: + await run_ctx.thread_store.update_status(thread_id, "running") + except Exception: + logger.warning("Failed to upsert thread_meta for %s (non-fatal)", sanitize_log_param(thread_id)) agent_factory = resolve_agent_factory(body.assistant_id) graph_input = normalize_input(body.input) config = build_run_config(thread_id, body.config, body.metadata, assistant_id=body.assistant_id) - # Merge DeerFlow-specific context overrides into configurable. + # Merge DeerFlow-specific context overrides into both ``configurable`` and ``context``. # The ``context`` field is a custom extension for the langgraph-compat layer # that carries agent configuration (model_name, thinking_enabled, etc.). # Only agent-relevant keys are forwarded; unknown keys (e.g. thread_id) are ignored. - context = getattr(body, "context", None) - if context: - _CONTEXT_CONFIGURABLE_KEYS = { - "model_name", - "mode", - "thinking_enabled", - "reasoning_effort", - "is_plan_mode", - "subagent_enabled", - "max_concurrent_subagents", - "agent_name", - "is_bootstrap", - } - configurable = config.setdefault("configurable", {}) - for key in _CONTEXT_CONFIGURABLE_KEYS: - if key in context: - configurable.setdefault(key, context[key]) + merge_run_context_overrides(config, getattr(body, "context", None)) stream_modes = normalize_stream_modes(body.stream_mode) @@ -313,8 +296,7 @@ async def start_run( bridge, run_mgr, record, - checkpointer=checkpointer, - store=store, + ctx=run_ctx, agent_factory=agent_factory, graph_input=graph_input, config=config, @@ -326,11 +308,9 @@ async def start_run( ) record.task = task - # After the run completes, sync the title generated by TitleMiddleware from - # the checkpointer into the Store record so that /threads/search returns the - # correct title instead of an empty values dict. - if store is not None: - asyncio.create_task(_sync_thread_title_after_run(task, thread_id, checkpointer, store)) + # Title sync is handled by worker.py's finally block which reads the + # title from the checkpoint and calls thread_store.update_display_name + # after the run completes. 
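`worker.py` itself is not part of this hunk, so the following is a hypothetical sketch of the `finally`-block title sync that the comment above describes. `ctx.checkpointer`, the function name, and the control flow are assumptions; the checkpoint config shape and the `update_display_name` call are taken from this diff:

```python
# Hypothetical worker-side title sync (worker.py is not in this hunk).
import logging

logger = logging.getLogger(__name__)


async def _execute_run(ctx, thread_id: str) -> None:
    try:
        ...  # drive the graph and forward events to the stream bridge
    finally:
        try:
            cfg = {"configurable": {"thread_id": thread_id, "checkpoint_ns": ""}}
            ckpt = await ctx.checkpointer.aget_tuple(cfg)
            title = (ckpt.checkpoint.get("channel_values", {}) or {}).get("title") if ckpt else None
            if title:
                await ctx.thread_store.update_display_name(thread_id, title)
        except Exception:
            logger.debug("Title sync failed for %s (non-fatal)", thread_id, exc_info=True)
```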
return record diff --git a/backend/app/gateway/utils.py b/backend/app/gateway/utils.py new file mode 100644 index 000000000..8368d84fc --- /dev/null +++ b/backend/app/gateway/utils.py @@ -0,0 +1,6 @@ +"""Shared utility helpers for the Gateway layer.""" + + +def sanitize_log_param(value: str) -> str: + """Strip control characters to prevent log injection.""" + return value.replace("\n", "").replace("\r", "").replace("\x00", "") diff --git a/backend/debug.py b/backend/debug.py index f558d1d71..341c676ed 100644 --- a/backend/debug.py +++ b/backend/debug.py @@ -19,24 +19,70 @@ import asyncio import logging from dotenv import load_dotenv -from langchain_core.messages import HumanMessage -from deerflow.agents import make_lead_agent +try: + from prompt_toolkit import PromptSession + from prompt_toolkit.history import InMemoryHistory + + _HAS_PROMPT_TOOLKIT = True +except ImportError: + _HAS_PROMPT_TOOLKIT = False load_dotenv() -logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", - datefmt="%Y-%m-%d %H:%M:%S", -) +_LOG_FMT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s" +_LOG_DATEFMT = "%Y-%m-%d %H:%M:%S" + + +def _setup_logging(log_level: int = logging.INFO) -> None: + """Route logs to ``debug.log`` using *log_level* for the initial root/file setup. + + This configures the root logger and the ``debug.log`` file handler so logs do + not print on the interactive console. It is idempotent: any pre-existing + handlers on the root logger (e.g. installed by ``logging.basicConfig`` in + transitively imported modules) are removed so the debug session output only + lands in ``debug.log``. + + Note: later config-driven logging adjustments may change named logger + verbosity without raising the root logger or file-handler thresholds set + here, so the eventual contents of ``debug.log`` may not be filtered solely by + this function's ``log_level`` argument. + """ + root = logging.root + for h in list(root.handlers): + root.removeHandler(h) + h.close() + root.setLevel(log_level) + + file_handler = logging.FileHandler("debug.log", mode="a", encoding="utf-8") + file_handler.setLevel(log_level) + file_handler.setFormatter(logging.Formatter(_LOG_FMT, datefmt=_LOG_DATEFMT)) + root.addHandler(file_handler) async def main(): + # Install file logging first so warnings emitted while loading config do not + # leak onto the interactive terminal via Python's lastResort handler. + _setup_logging() + + from deerflow.config import get_app_config + from deerflow.config.app_config import apply_logging_level + + app_config = get_app_config() + apply_logging_level(app_config.log_level) + + # Delay the rest of the deerflow imports until *after* logging is installed + # so that any import-time side effects (e.g. deerflow.agents starts a + # background skill-loader thread on import) emit logs to debug.log instead + # of leaking onto the interactive terminal via Python's lastResort handler. 
+ from langchain_core.messages import HumanMessage + from langgraph.runtime import Runtime + + from deerflow.agents import make_lead_agent + from deerflow.mcp import initialize_mcp_tools + # Initialize MCP tools at startup try: - from deerflow.mcp import initialize_mcp_tools - await initialize_mcp_tools() except Exception as e: print(f"Warning: Failed to initialize MCP tools: {e}") @@ -52,16 +98,27 @@ async def main(): } } + runtime = Runtime(context={"thread_id": config["configurable"]["thread_id"]}) + config["configurable"]["__pregel_runtime"] = runtime + agent = make_lead_agent(config) + session = PromptSession(history=InMemoryHistory()) if _HAS_PROMPT_TOOLKIT else None + print("=" * 50) print("Lead Agent Debug Mode") print("Type 'quit' or 'exit' to stop") + print(f"Logs: debug.log (log_level={app_config.log_level})") + if not _HAS_PROMPT_TOOLKIT: + print("Tip: `uv sync --group dev` to enable arrow-key & history support") print("=" * 50) while True: try: - user_input = input("\nYou: ").strip() + if session: + user_input = (await session.prompt_async("\nYou: ")).strip() + else: + user_input = input("\nYou: ").strip() if not user_input: continue if user_input.lower() in ("quit", "exit"): @@ -70,15 +127,15 @@ async def main(): # Invoke the agent state = {"messages": [HumanMessage(content=user_input)]} - result = await agent.ainvoke(state, config=config, context={"thread_id": "debug-thread-001"}) + result = await agent.ainvoke(state, config=config) # Print the response if result.get("messages"): last_message = result["messages"][-1] print(f"\nAgent: {last_message.content}") - except KeyboardInterrupt: - print("\nInterrupted. Goodbye!") + except (KeyboardInterrupt, EOFError): + print("\nGoodbye!") break except Exception as e: print(f"\nError: {e}") diff --git a/backend/docs/AUTH_TEST_DOCKER_GAP.md b/backend/docs/AUTH_TEST_DOCKER_GAP.md new file mode 100644 index 000000000..adf4916a3 --- /dev/null +++ b/backend/docs/AUTH_TEST_DOCKER_GAP.md @@ -0,0 +1,77 @@ +# Docker Test Gap (Section 七 7.4) + +This file documents the only **un-executed** test cases from +`backend/docs/AUTH_TEST_PLAN.md` after the full release validation pass. + +## Why this gap exists + +The release validation environment (sg_dev: `10.251.229.92`) **does not have +a Docker daemon installed**. The TC-DOCKER cases are container-runtime +behavior tests that need an actual Docker engine to spin up +`docker/docker-compose.yaml` services. 
+ +```bash +$ ssh sg_dev "which docker; docker --version" +# (empty) +# bash: docker: command not found +``` + +All other test plan sections were executed against either: +- The local dev box (Mac, all services running locally), or +- The deployed sg_dev instance (gateway + frontend + nginx via SSH tunnel) + +## Cases not executed + +| Case | Title | What it covers | Why not run | +|---|---|---|---| +| TC-DOCKER-01 | `users.db` volume persistence | Verify the `DEER_FLOW_HOME` bind mount survives container restart | needs `docker compose up` | +| TC-DOCKER-02 | Session persistence across container restart | `AUTH_JWT_SECRET` env var keeps cookies valid after `docker compose down && up` | needs `docker compose down/up` | +| TC-DOCKER-03 | Per-worker rate limiter divergence | Confirms in-process `_login_attempts` dict doesn't share state across `gunicorn` workers (4 by default in the compose file); known limitation, documented | needs multi-worker container | +| TC-DOCKER-04 | IM channels skip AuthMiddleware | Verify Feishu/Slack/Telegram dispatchers run in-container against `http://langgraph:2024` without going through nginx | needs `docker logs` | +| TC-DOCKER-05 | Admin credentials surfacing | **Updated post-simplify** — was "log scrape", now "0600 credential file in `DEER_FLOW_HOME`". The file-based behavior is already validated by TC-1.1 + TC-UPG-13 on sg_dev (non-Docker), so the only Docker-specific gap is verifying the volume mount carries the file out to the host | needs container + host volume | +| TC-DOCKER-06 | Gateway-mode Docker deploy | `./scripts/deploy.sh --gateway` produces a 3-container topology (no `langgraph` container); same auth flow as standard mode | needs `docker compose --profile gateway` | + +## Coverage already provided by non-Docker tests + +The **auth-relevant** behavior in each Docker case is already exercised by +the test cases that ran on sg_dev or local: + +| Docker case | Auth behavior covered by | +|---|---| +| TC-DOCKER-01 (volume persistence) | TC-REENT-01 on sg_dev (admin row survives gateway restart) — same SQLite file, just no container layer between | +| TC-DOCKER-02 (session persistence) | TC-API-02/03/06 (cookie roundtrip), plus TC-REENT-04 (multi-cookie) — JWT verification is process-state-free, container restart is equivalent to `pkill uvicorn && uv run uvicorn` | +| TC-DOCKER-03 (per-worker rate limit) | TC-GW-04 + TC-REENT-09 (single-worker rate limit + 5min expiry). The cross-worker divergence is an architectural property of the in-memory dict; no auth code path differs | +| TC-DOCKER-04 (IM channels skip auth) | Code-level only: `app/channels/manager.py` uses `langgraph_sdk` directly with no cookie handling. The langgraph_auth handler is bypassed by going through SDK, not HTTP | +| TC-DOCKER-05 (credential surfacing) | TC-1.1 on sg_dev (file at `~/deer-flow/backend/.deer-flow/admin_initial_credentials.txt`, mode 0600, password 22 chars) — the only Docker-unique step is whether the bind mount projects this path onto the host, which is a `docker compose` config check, not a runtime behavior change | +| TC-DOCKER-06 (gateway-mode container) | Section 七 7.2 covered by TC-GW-01..05 + Section 二 (gateway-mode auth flow on sg_dev) — same Gateway code, container is just a packaging change | + +## Reproduction steps when Docker becomes available + +Anyone with `docker` + `docker compose` installed can reproduce the gap by +running the test plan section verbatim. 
Pre-flight: + +```bash +# Required on the host +docker --version # >=24.x +docker compose version # plugin >=2.x + +# Required env var (otherwise sessions reset on every container restart) +echo "AUTH_JWT_SECRET=$(python3 -c 'import secrets; print(secrets.token_urlsafe(32))')" \ + >> .env + +# Optional: pin DEER_FLOW_HOME to a stable host path +echo "DEER_FLOW_HOME=$HOME/deer-flow-data" >> .env +``` + +Then run TC-DOCKER-01..06 from the test plan as written. + +## Decision log + +- **Not blocking the release.** The auth-relevant behavior in every Docker + case has an already-validated equivalent on bare metal. The gap is purely + about *container packaging* details (bind mounts, multi-worker, log + collection), not about whether the auth code paths work. +- **TC-DOCKER-05 was updated in place** in `AUTH_TEST_PLAN.md` to reflect + the post-simplify reality (credentials file → 0600 file, no log leak). + The old "grep 'Password:' in docker logs" expectation would have failed + silently and given a false sense of coverage. diff --git a/backend/docs/AUTH_TEST_PLAN.md b/backend/docs/AUTH_TEST_PLAN.md new file mode 100644 index 000000000..15b20494a --- /dev/null +++ b/backend/docs/AUTH_TEST_PLAN.md @@ -0,0 +1,1801 @@ +# Auth 模块测试计划 + +## 测试矩阵 + +| 模式 | 启动命令 | Auth 层 | 端口 | +|------|---------|---------|------| +| 标准模式 | `make dev` | Gateway AuthMiddleware + LangGraph auth | 2026 (nginx) | +| Gateway 模式 | `make dev-pro` | Gateway AuthMiddleware(全量) | 2026 (nginx) | +| 直连 Gateway | `cd backend && make gateway` | Gateway AuthMiddleware | 8001 | +| 直连 LangGraph | `cd backend && make dev` | LangGraph auth | 2024 | + +每种模式下都需执行以下测试。 + +--- + +## 一、环境准备 + +### 1.1 首次启动(干净数据库) + +```bash +# 清除已有数据 +rm -f backend/.deer-flow/users.db + +# 选择模式启动 +make dev # 标准模式 +# 或 +make dev-pro # Gateway 模式 +``` + +**验证点:** +- [ ] 控制台输出 admin 邮箱和随机密码 +- [ ] 密码格式为 `secrets.token_urlsafe(16)` 的 22 字符字符串 +- [ ] 邮箱为 `admin@deerflow.dev` +- [ ] 提示 `Change it after login: Settings -> Account` + +### 1.2 非首次启动 + +```bash +# 不清除数据库,直接启动 +make dev +``` + +**验证点:** +- [ ] 控制台不输出密码 +- [ ] 如果 admin 仍 `needs_setup=True`,控制台有 warning 提示 + +### 1.3 环境变量配置 + +| 变量 | 验证 | +|------|------| +| `AUTH_JWT_SECRET` 未设 | 启动时 warning,自动生成临时密钥 | +| `AUTH_JWT_SECRET` 已设 | 无 warning,重启后 session 保持 | + +--- + +## 二、接口流程测试 + +> 以下用 `BASE=http://localhost:2026` 为例。标准模式和 Gateway 模式都用此地址。 +> 直连测试替换为对应端口。 +> +> **CSRF token 提取**:多处用到从 cookie jar 提取 CSRF token,统一使用: +> ```bash +> CSRF=$(python3 -c " +> import http.cookiejar +> cj = http.cookiejar.MozillaCookieJar('cookies.txt'); cj.load() +> print(next(c.value for c in cj if c.name == 'csrf_token')) +> ") +> ``` +> 或简写(多数场景够用):`CSRF=$(grep csrf_token cookies.txt | awk '{print $NF}')` + +### 2.1 注册 + 登录 + 会话 + +#### TC-API-01: Setup 状态查询 + +```bash +curl -s $BASE/api/v1/auth/setup-status | jq . +``` + +**预期:** 返回 `{"needs_setup": false}`(admin 在启动时已自动创建,`count_users() > 0`)。仅在启动完成前的极短窗口内可能返回 `true`。 + +#### TC-API-02: Admin 首次登录 + +```bash +curl -s -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@deerflow.dev&password=<控制台密码>" \ + -c cookies.txt | jq . +``` + +**预期:** +- 状态码 200 +- Body: `{"expires_in": 604800, "needs_setup": true}` +- `cookies.txt` 包含 `access_token`(HttpOnly)和 `csrf_token`(非 HttpOnly) + +#### TC-API-03: 获取当前用户 + +```bash +curl -s $BASE/api/v1/auth/me -b cookies.txt | jq . 
+``` + +**预期:** `{"id": "...", "email": "admin@deerflow.dev", "system_role": "admin", "needs_setup": true}` + +#### TC-API-04: Setup 流程(改邮箱 + 改密码) + +```bash +CSRF=$(grep csrf_token cookies.txt | awk '{print $NF}') +curl -s -X POST $BASE/api/v1/auth/change-password \ + -b cookies.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: $CSRF" \ + -d '{"current_password":"<控制台密码>","new_password":"NewPass123!","new_email":"admin@example.com"}' | jq . +``` + +**预期:** +- 状态码 200 +- `{"message": "Password changed successfully"}` +- 再调 `/auth/me` 邮箱变为 `admin@example.com`,`needs_setup` 变为 `false` + +#### TC-API-05: 普通用户注册 + +```bash +curl -s -X POST $BASE/api/v1/auth/register \ + -H "Content-Type: application/json" \ + -d '{"email":"user1@example.com","password":"UserPass1!"}' \ + -c user_cookies.txt | jq . +``` + +**预期:** 状态码 201,`system_role` 为 `"user"`,自动登录(cookie 已设) + +#### TC-API-06: 登出 + +```bash +curl -s -X POST $BASE/api/v1/auth/logout -b cookies.txt | jq . +``` + +**预期:** `{"message": "Successfully logged out"}`,后续用 cookies.txt 访问 `/auth/me` 返回 401 + +### 2.2 多租户隔离 + +#### TC-API-07: 用户 A 创建 Thread + +```bash +# 以 user1 登录 +curl -s -X POST $BASE/api/v1/auth/login/local \ + -d "username=user1@example.com&password=UserPass1!" \ + -c user1.txt + +CSRF1=$(grep csrf_token user1.txt | awk '{print $NF}') + +# 创建 thread +curl -s -X POST $BASE/api/threads \ + -b user1.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: $CSRF1" \ + -d '{"metadata":{}}' | jq .thread_id +# 记录 THREAD_ID +``` + +#### TC-API-08: 用户 B 无法访问用户 A 的 Thread + +```bash +# 注册并登录 user2 +curl -s -X POST $BASE/api/v1/auth/register \ + -H "Content-Type: application/json" \ + -d '{"email":"user2@example.com","password":"UserPass2!"}' \ + -c user2.txt + +# 尝试访问 user1 的 thread +curl -s $BASE/api/threads/$THREAD_ID -b user2.txt +``` + +**预期:** 状态码 404(不是 403,避免泄露 thread 存在性) + +#### TC-API-09: 用户 B 搜索 Thread 看不到用户 A 的 + +```bash +CSRF2=$(grep csrf_token user2.txt | awk '{print $NF}') +curl -s -X POST $BASE/api/threads/search \ + -b user2.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: $CSRF2" \ + -d '{}' | jq length +``` + +**预期:** 返回 0 或仅包含 user2 自己的 thread + +### 2.3 标准模式 LangGraph Server 隔离 + +> 仅在标准模式下测试。Gateway 模式不跑 LangGraph Server。 + +#### TC-API-10: LangGraph 端点需要 cookie + +```bash +# 不带 cookie 访问 LangGraph 接口 +curl -s -w "%{http_code}" $BASE/api/langgraph/threads +``` + +**预期:** 401 + +#### TC-API-11: LangGraph 带 cookie 可访问 + +```bash +curl -s $BASE/api/langgraph/threads -b user1.txt | jq length +``` + +**预期:** 200,返回 user1 的 thread 列表 + +#### TC-API-12: LangGraph 隔离 — 用户只看到自己的 + +```bash +# user2 查 LangGraph threads +curl -s $BASE/api/langgraph/threads -b user2.txt | jq length +``` + +**预期:** 不包含 user1 的 thread + +### 2.4 Token 失效 + +#### TC-API-13: 改密码后旧 token 立即失效 + +```bash +# 保存当前 cookie +cp user1.txt user1_old.txt + +# 改密码 +CSRF1=$(grep csrf_token user1.txt | awk '{print $NF}') +curl -s -X POST $BASE/api/v1/auth/change-password \ + -b user1.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: $CSRF1" \ + -d '{"current_password":"UserPass1!","new_password":"NewUserPass1!"}' \ + -c user1.txt + +# 用旧 cookie 访问 +curl -s -w "%{http_code}" $BASE/api/v1/auth/me -b user1_old.txt +``` + +**预期:** 401(token_version 不匹配) + +#### TC-API-14: 改密码后新 cookie 可用 + +```bash +curl -s $BASE/api/v1/auth/me -b user1.txt | jq .email +``` + +**预期:** 200,返回用户信息 + +### 2.5 错误响应格式 + +#### TC-API-15: 结构化错误响应 + +```bash +# 错误密码登录 +curl -s -X POST $BASE/api/v1/auth/login/local \ + -d 
"username=admin@example.com&password=wrong" | jq .detail +``` + +**预期:** +```json +{"code": "invalid_credentials", "message": "Incorrect email or password"} +``` + +#### TC-API-16: 重复邮箱注册 + +```bash +curl -s -X POST $BASE/api/v1/auth/register \ + -H "Content-Type: application/json" \ + -d '{"email":"user1@example.com","password":"AnyPass123"}' -w "\n%{http_code}" +``` + +**预期:** 400,`{"code": "email_already_exists", ...}` + +--- + +## 三、攻击测试 + +### 3.1 暴力破解防护 + +#### TC-ATK-01: IP 限速 + +```bash +# 连续 6 次错误密码 +for i in $(seq 1 6); do + echo "Attempt $i:" + curl -s -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@example.com&password=wrong$i" -w " HTTP %{http_code}\n" +done +``` + +**预期:** 前 5 次返回 401,第 6 次返回 429 `"Too many login attempts. Try again later."` + +#### TC-ATK-02: 限速后正确密码也被拒 + +```bash +# 紧接上一步 +curl -s -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@example.com&password=正确密码" -w " HTTP %{http_code}\n" +``` + +**预期:** 429(锁定 5 分钟) + +#### TC-ATK-03: 成功登录清除限速 + +```bash +# 等待锁定过期后(或重启服务),用正确密码登录 +curl -s -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@example.com&password=正确密码" -w " HTTP %{http_code}\n" +``` + +**预期:** 200,计数器重置 + +### 3.2 CSRF 防护 + +#### TC-ATK-04: 无 CSRF token 的 POST 请求 + +```bash +curl -s -X POST $BASE/api/threads \ + -b user1.txt \ + -H "Content-Type: application/json" \ + -d '{"metadata":{}}' -w "\nHTTP %{http_code}" +``` + +**预期:** 403 `"CSRF token missing"` + +#### TC-ATK-05: 错误 CSRF token + +```bash +curl -s -X POST $BASE/api/threads \ + -b user1.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: fake-token" \ + -d '{"metadata":{}}' -w "\nHTTP %{http_code}" +``` + +**预期:** 403 `"CSRF token mismatch"` + +### 3.3 Cookie 安全 + +> HTTP 与 HTTPS 行为差异通过 `X-Forwarded-Proto: https` 模拟。 +> **注意:** 经 nginx 代理时,nginx 的 `proxy_set_header X-Forwarded-Proto $scheme` 会覆盖 +> 客户端发的值(`$scheme` = nginx 监听端口的 scheme),因此 HTTPS 模拟必须**直连 Gateway(端口 8001)**。 +> 每个 case 需在 **login** 和 **register** 两个端点各验证一次。 + +#### TC-ATK-06: HTTP 模式 Cookie 属性 + +```bash +# 登录 +curl -s -D - -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@example.com&password=正确密码" 2>/dev/null | grep -i set-cookie +``` + +**预期:** +- `access_token`: `HttpOnly; Path=/; SameSite=lax`,无 `Secure`,无 `Max-Age` +- `csrf_token`: `Path=/; SameSite=strict`,无 `HttpOnly`(JS 需要读取),无 `Secure` + +```bash +# 注册 +curl -s -D - -X POST $BASE/api/v1/auth/register \ + -H "Content-Type: application/json" \ + -d '{"email":"cookie-http@example.com","password":"CookieTest1!"}' 2>/dev/null | grep -i set-cookie +``` + +**预期:** 同上 + +#### TC-ATK-07: HTTPS 模式 Cookie 属性 + +> **必须直连 Gateway**(`GW=http://localhost:8001`),经 nginx 会被 `$scheme` 覆盖。 + +```bash +GW=http://localhost:8001 + +# 登录(模拟 HTTPS) +curl -s -D - -X POST $GW/api/v1/auth/login/local \ + -H "X-Forwarded-Proto: https" \ + -d "username=admin@example.com&password=正确密码" 2>/dev/null | grep -i set-cookie +``` + +**预期:** +- `access_token`: `HttpOnly; Secure; Path=/; SameSite=lax; Max-Age=604800` +- `csrf_token`: `Secure; Path=/; SameSite=strict`,无 `HttpOnly` + +```bash +# 注册(模拟 HTTPS) +curl -s -D - -X POST $GW/api/v1/auth/register \ + -H "Content-Type: application/json" \ + -H "X-Forwarded-Proto: https" \ + -d '{"email":"cookie-https@example.com","password":"CookieTest1!"}' 2>/dev/null | grep -i set-cookie +``` + +**预期:** 同上 + +#### TC-ATK-07a: HTTP/HTTPS 差异对比 + +> 直连 Gateway 执行,避免 nginx 覆盖 `X-Forwarded-Proto`。 + +```bash +GW=http://localhost:8001 + +for proto in "" "https"; do + HEADER="" + LABEL="HTTP" + if [ -n "$proto" 
]; then + HEADER="-H X-Forwarded-Proto:$proto" + LABEL="HTTPS" + fi + echo "=== $LABEL ===" + EMAIL="compare-${LABEL,,}-$(date +%s)@example.com" + curl -s -D - -X POST $GW/api/v1/auth/register \ + -H "Content-Type: application/json" $HEADER \ + -d "{\"email\":\"$EMAIL\",\"password\":\"Compare1!\"}" 2>/dev/null | grep -i set-cookie | while read line; do + if echo "$line" | grep -q "access_token="; then + echo " access_token:" + echo " HttpOnly: $(echo "$line" | grep -qi httponly && echo YES || echo NO)" + echo " Secure: $(echo "$line" | grep -qi "secure" && echo "$line" | grep -v samesite | grep -qi secure && echo YES || echo NO)" + echo " Max-Age: $(echo "$line" | grep -oi "max-age=[0-9]*" || echo NONE)" + echo " SameSite: $(echo "$line" | grep -oi "samesite=[a-z]*")" + fi + if echo "$line" | grep -q "csrf_token="; then + echo " csrf_token:" + echo " HttpOnly: $(echo "$line" | grep -qi httponly && echo YES || echo NO)" + echo " Secure: $(echo "$line" | grep -qi "secure" && echo "$line" | grep -v samesite | grep -qi secure && echo YES || echo NO)" + echo " SameSite: $(echo "$line" | grep -oi "samesite=[a-z]*")" + fi + done +done +``` + +**预期对比表:** + +| 属性 | HTTP access_token | HTTPS access_token | HTTP csrf_token | HTTPS csrf_token | +|------|------|------|------|------| +| HttpOnly | Yes | Yes | No | No | +| Secure | No | **Yes** | No | **Yes** | +| SameSite | Lax | Lax | Strict | Strict | +| Max-Age | 无(session cookie) | **604800**(7天) | 无 | 无 | + +### 3.4 越权访问 + +#### TC-ATK-08: 无 cookie 访问受保护接口 + +```bash +for path in /api/models /api/mcp/config /api/memory /api/skills \ + /api/agents /api/channels; do + echo "$path: $(curl -s -w '%{http_code}' -o /dev/null $BASE$path)" +done +``` + +**预期:** 全部 401 + +#### TC-ATK-09: 伪造 JWT + +```bash +# 用不同 secret 签名的 token +FAKE_TOKEN=$(python3 -c " +import jwt +print(jwt.encode({'sub':'admin-id','ver':0,'exp':9999999999}, 'wrong-secret', algorithm='HS256')) +") + +curl -s -w "%{http_code}" $BASE/api/v1/auth/me \ + --cookie "access_token=$FAKE_TOKEN" +``` + +**预期:** 401(签名验证失败) + +#### TC-ATK-10: 过期 JWT + +```bash +# 不依赖环境变量,直接用一个已过期的、随机 secret 签名的 token +# 无论 secret 是否匹配,过期 token 都会被拒绝 +EXPIRED_TOKEN=$(python3 -c " +import jwt, time +print(jwt.encode({'sub':'x','ver':0,'exp':int(time.time())-100}, 'any-secret-32chars-placeholder!!', algorithm='HS256')) +") + +curl -s -w "%{http_code}" -o /dev/null $BASE/api/v1/auth/me \ + --cookie "access_token=$EXPIRED_TOKEN" +``` + +**预期:** 401(过期 or 签名不匹配,均被拒绝) + +### 3.5 密码安全 + +#### TC-ATK-11: 密码长度不足 + +```bash +curl -s -X POST $BASE/api/v1/auth/register \ + -H "Content-Type: application/json" \ + -d '{"email":"short@example.com","password":"1234567"}' -w "\nHTTP %{http_code}" +``` + +**预期:** 422(Pydantic validation: min_length=8) + +#### TC-ATK-12: 密码不以明文存储 + +```bash +# 检查数据库 +sqlite3 backend/.deer-flow/users.db "SELECT email, password_hash FROM users LIMIT 3;" +``` + +**预期:** `password_hash` 以 `$2b$` 开头(bcrypt 格式) + +--- + +## 四、UI 操作测试 + +> 浏览器中操作,验证前后端联动。 + +### 4.1 首次登录流程 + +#### TC-UI-01: 访问首页跳转登录 + +1. 打开 `http://localhost:2026/workspace` +2. **预期:** 自动跳转到 `/login` + +#### TC-UI-02: Login 页面 + +1. 输入 admin 邮箱和控制台密码 +2. 点击 Login +3. **预期:** 跳转到 `/setup`(因为 `needs_setup=true`) + +#### TC-UI-03: Setup 页面 + +1. 输入新邮箱、控制台密码(current)、新密码、确认密码 +2. 点击 Complete Setup +3. **预期:** 跳转到 `/workspace` +4. 刷新页面不跳回 `/setup` + +#### TC-UI-04: Setup 密码不匹配 + +1. 新密码和确认密码不一致 +2. 点击 Complete Setup +3. **预期:** 显示 "Passwords do not match" 错误 + +### 4.2 日常使用 + +#### TC-UI-05: 创建对话 + +1. 在 workspace 发送一条消息 +2. 
**预期:** 左侧栏出现新 thread + +#### TC-UI-06: 对话持久化 + +1. 创建对话后刷新页面 +2. **预期:** 对话列表和内容仍然存在 + +#### TC-UI-07: 登出 + +1. 点击头像 → Logout +2. **预期:** 跳转到首页 `/` +3. 直接访问 `/workspace` → 跳转到 `/login` + +### 4.3 多用户隔离 + +#### TC-UI-08: 用户 A 看不到用户 B 的对话 + +1. 用户 A 在浏览器 1 登录,创建一个对话并发消息 +2. 用户 B 在浏览器 2(或隐身窗口)注册并登录 +3. **预期:** 用户 B 的 workspace 左侧栏为空,看不到用户 A 的对话 + +#### TC-UI-09: 直接 URL 访问他人 Thread + +1. 复制用户 A 的 thread URL +2. 在用户 B 的浏览器中访问 +3. **预期:** 404 或空白页,不显示对话内容 + +### 4.4 Session 管理 + +#### TC-UI-10: Tab 切换 Session 检查 + +1. 登录 workspace +2. 切换到其他 tab 等待 60+ 秒 +3. 切回 workspace tab +4. **预期:** 静默检查 session,页面正常(控制台无 401 刷屏) + +#### TC-UI-11: Session 过期后 Tab 切回 + +1. 登录 workspace +2. 在另一个 tab 改密码(使当前 session 失效) +3. 切回 workspace tab +4. **预期:** 自动跳转到 `/login` + +#### TC-UI-12: 改密码后 Settings 页面 + +1. 进入 Settings → Account +2. 修改密码 +3. **预期:** 成功提示,页面不需要重新登录(cookie 已自动更新) + +### 4.5 注册流程 + +#### TC-UI-13: 从登录页跳转注册 + +1. 在 `/login` 页面点击注册链接 +2. 输入邮箱和密码 +3. **预期:** 注册成功后自动跳转 `/workspace` + +#### TC-UI-14: 重复邮箱注册 + +1. 用已注册的邮箱尝试注册 +2. **预期:** 显示 "Email already registered" 错误 + +### 4.6 密码重置(CLI) + +#### TC-UI-15: reset_admin 后重新登录 + +1. 执行 `cd backend && python -m app.gateway.auth.reset_admin` +2. 使用新密码登录 +3. **预期:** 跳转到 `/setup` 页面(`needs_setup` 被重置为 true) +4. 旧 session 已失效 + +--- + +## 五、升级测试 + +> 模拟从无 auth 版本(main 分支)升级到 auth 版本(feat/rfc-001-auth-module)。 + +### 5.1 准备旧版数据 + +```bash +# 1. 切到 main 分支,启动服务 +git stash && git checkout main +make dev + +# 2. 创建一些对话数据(无 auth,直接访问) +curl -s -X POST http://localhost:2026/api/langgraph/threads \ + -H "Content-Type: application/json" \ + -d '{"metadata":{"title":"old-thread-1"}}' | jq .thread_id + +curl -s -X POST http://localhost:2026/api/langgraph/threads \ + -H "Content-Type: application/json" \ + -d '{"metadata":{"title":"old-thread-2"}}' | jq .thread_id + +# 3. 记录 thread 数量 +curl -s http://localhost:2026/api/langgraph/threads | jq length +# 预期: 2+ + +# 4. 停止服务 +make stop +``` + +### 5.2 升级并启动 + +```bash +# 5. 
切到 auth 分支 +git checkout feat/rfc-001-auth-module && git stash pop +make install +make dev +``` + +#### TC-UPG-01: 首次启动创建 admin + +**预期:** +- [ ] 控制台输出 admin 邮箱(`admin@deerflow.dev`)和随机密码 +- [ ] 无报错,正常启动 + +#### TC-UPG-02: 旧 Thread 迁移到 admin + +```bash +# 登录 admin +curl -s -X POST http://localhost:2026/api/v1/auth/login/local \ + -d "username=admin@deerflow.dev&password=<控制台密码>" \ + -c cookies.txt + +# 查看 thread 列表 +CSRF=$(grep csrf_token cookies.txt | awk '{print $NF}') +curl -s -X POST http://localhost:2026/api/threads/search \ + -b cookies.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: $CSRF" \ + -d '{}' | jq length +``` + +**预期:** +- [ ] 返回的 thread 数量 ≥ 旧版创建的数量 +- [ ] 控制台日志有 `Migrated N orphaned thread(s) to admin` +- [ ] 每个 thread 的 `metadata.owner_id` 都已被设为 admin 的 ID + +#### TC-UPG-03: 旧 Thread 内容完整 + +```bash +# 检查某个旧 thread 的内容 +curl -s http://localhost:2026/api/threads/ \ + -b cookies.txt | jq .metadata +``` + +**预期:** +- [ ] `metadata.title` 保留原值(如 `old-thread-1`) +- [ ] `metadata.owner_id` 已填充 + +#### TC-UPG-04: 新用户看不到旧 Thread + +```bash +# 注册新用户 +curl -s -X POST http://localhost:2026/api/v1/auth/register \ + -H "Content-Type: application/json" \ + -d '{"email":"newuser@example.com","password":"NewPass123!"}' \ + -c newuser.txt + +CSRF2=$(grep csrf_token newuser.txt | awk '{print $NF}') +curl -s -X POST http://localhost:2026/api/threads/search \ + -b newuser.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: $CSRF2" \ + -d '{}' | jq length +``` + +**预期:** 返回 0(旧 thread 属于 admin,新用户不可见) + +### 5.3 数据库 Schema 兼容 + +#### TC-UPG-05: 无 users.db 时自动创建 + +```bash +ls -la backend/.deer-flow/users.db +``` + +**预期:** 文件存在,`sqlite3` 可查到 `users` 表含 `needs_setup`、`token_version` 列 + +#### TC-UPG-06: users.db WAL 模式 + +```bash +sqlite3 backend/.deer-flow/users.db "PRAGMA journal_mode;" +``` + +**预期:** 返回 `wal` + +### 5.4 配置兼容 + +#### TC-UPG-07: 无 AUTH_JWT_SECRET 的旧 .env 文件 + +```bash +# 确认 .env 中没有 AUTH_JWT_SECRET +grep AUTH_JWT_SECRET backend/.env || echo "NOT SET" +``` + +**预期:** +- [ ] 启动时 warning:`AUTH_JWT_SECRET is not set — using auto-generated ephemeral secret` +- [ ] 服务正常可用 +- [ ] 重启后旧 session 失效(临时密钥变了) + +#### TC-UPG-08: 旧 config.yaml 无 auth 相关配置 + +```bash +# 检查 config.yaml 没有 auth 段 +grep -c "auth" config.yaml || echo "0" +``` + +**预期:** auth 模块不依赖 config.yaml(配置走环境变量),旧 config.yaml 不影响启动 + +### 5.5 前端兼容 + +#### TC-UPG-09: 旧前端缓存 + +1. 用旧版前端的浏览器缓存访问升级后的服务 +2. **预期:** 被 AuthMiddleware 拦截返回 401(旧前端无 cookie),页面自然刷新后加载新前端 + +#### TC-UPG-10: 书签 URL + +1. 用升级前保存的 workspace URL(如 `localhost:2026/workspace/chats/xxx`)直接访问 +2. 
**预期:** 跳转到 `/login`,登录后跳回原 URL(`?next=` 参数) + +### 5.6 降级回滚 + +#### TC-UPG-11: 回退到 main 分支 + +```bash +make stop +git checkout main +make dev +``` + +**预期:** +- [ ] 服务正常启动(忽略 `users.db`,无 auth 相关代码不报错) +- [ ] 旧对话数据仍然可访问 +- [ ] `users.db` 文件残留但不影响运行 + +#### TC-UPG-12: 再次升级到 auth 分支 + +```bash +make stop +git checkout feat/rfc-001-auth-module +make dev +``` + +**预期:** +- [ ] 识别已有 `users.db`,不重新创建 admin +- [ ] 旧的 admin 账号仍可登录(如果回退期间未删 `users.db`) + +### 5.7 休眠 Admin(初始密码未使用/未更改) + +> 首次启动生成 admin + 随机密码,但运维未登录、未改密码。 +> 密码只在首次启动的控制台闪过一次,后续启动不再显示。 + +#### TC-UPG-13: 重启后自动重置密码并打印 + +```bash +# 首次启动,记录密码 +rm -f backend/.deer-flow/users.db +make dev +# 控制台输出密码 P0,不登录 +make stop + +# 隔了几天,再次启动 +make dev +# 控制台输出新密码 P1 +``` + +**预期:** +- [ ] 控制台输出 `Admin account setup incomplete — password reset` +- [ ] 输出新密码 P1(P0 已失效) +- [ ] 用 P1 可以登录,P0 不可以 +- [ ] 登录后 `needs_setup=true`,跳转 `/setup` +- [ ] `token_version` 递增(旧 session 如有也失效) + +#### TC-UPG-14: 密码丢失 — 无需 CLI,重启即可 + +```bash +# 忘记了控制台密码 → 直接重启服务 +make stop && make dev +# 控制台自动输出新密码 +``` + +**预期:** +- [ ] 无需 `reset_admin`,重启服务即可拿到新密码 +- [ ] `reset_admin` CLI 仍然可用作手动备选方案 + +#### TC-UPG-15: 休眠 admin 期间普通用户注册 + +```bash +# admin 存在但从未登录,普通用户先注册 +curl -s -X POST $BASE/api/v1/auth/register \ + -H "Content-Type: application/json" \ + -d '{"email":"earlybird@example.com","password":"EarlyPass1!"}' \ + -c early.txt -w "\nHTTP %{http_code}" +``` + +**预期:** +- [ ] 注册成功(201),角色为 `user` +- [ ] 无法提权为 admin +- [ ] 普通用户的数据与 admin 隔离 + +#### TC-UPG-16: 休眠 admin 不影响后续操作 + +```bash +# 普通用户正常创建 thread、发消息 +CSRF=$(grep csrf_token early.txt | awk '{print $NF}') +curl -s -X POST $BASE/api/threads \ + -b early.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: $CSRF" \ + -d '{"metadata":{}}' | jq .thread_id +``` + +**预期:** 正常创建,不受休眠 admin 影响 + +#### TC-UPG-17: 休眠 admin 最终完成 Setup + +```bash +# 运维终于登录 +curl -s -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@deerflow.dev&password=" \ + -c admin.txt | jq .needs_setup +# 预期: true + +# 完成 setup +CSRF=$(grep csrf_token admin.txt | awk '{print $NF}') +curl -s -X POST $BASE/api/v1/auth/change-password \ + -b admin.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: $CSRF" \ + -d '{"current_password":"<密码>","new_password":"AdminFinal1!","new_email":"admin@real.com"}' \ + -c admin.txt + +# 验证 +curl -s $BASE/api/v1/auth/me -b admin.txt | jq '{email, needs_setup}' +``` + +**预期:** +- [ ] `email` 变为 `admin@real.com` +- [ ] `needs_setup` 变为 `false` +- [ ] 后续重启控制台不再有 warning + +#### TC-UPG-18: 长期未用后 JWT 密钥轮换 + +```bash +# 场景:admin 未登录期间,运维更换了 AUTH_JWT_SECRET +# 1. 首次启动用自动生成的临时密钥 +# 2. 某天运维在 .env 设置了固定密钥 +echo "AUTH_JWT_SECRET=$(python3 -c 'import secrets; print(secrets.token_urlsafe(32))')" >> .env +make stop && make dev +``` + +**预期:** +- [ ] 服务正常启动 +- [ ] 旧密码仍可登录(密码存在 DB,与 JWT 密钥无关) +- [ ] 旧的 JWT token 失效(密钥变了签名不匹配)— 但因为从未登录过也没有旧 token + +--- + +## 六、可重入测试 + +> 验证 auth 模块在重复操作、并发、中断恢复等场景下行为正确,无竞态条件。 + +### 6.1 启动可重入 + +#### TC-REENT-01: 连续重启不重复创建 admin + +```bash +# 连续启动 3 次(daemon 模式,避免前台阻塞) +for i in 1 2 3; do + make dev-daemon && sleep 10 && make stop +done + +# 检查 admin 数量 +sqlite3 backend/.deer-flow/users.db \ + "SELECT COUNT(*) FROM users WHERE system_role='admin';" +``` + +**预期:** 始终为 1。不会因重启创建多个 admin。 + +#### TC-REENT-02: 多进程同时启动 + +```bash +# 模拟两个 gateway 进程同时启动(竞争 admin 创建) +cd backend +PYTHONPATH=. 
uv run python -c " +import asyncio +from app.gateway.app import create_app, _ensure_admin_user + +async def boot(): + app = create_app() + # 模拟两个并发 ensure_admin + await asyncio.gather( + _ensure_admin_user(app), + _ensure_admin_user(app), + ) + +asyncio.run(boot()) +" 2>&1 | grep -i "admin\|error\|duplicate" +``` + +**预期:** +- [ ] 不报错(SQLite UNIQUE 约束捕获竞争,第二个静默跳过) +- [ ] 最终只有 1 个 admin + +#### TC-REENT-03: Thread 迁移幂等 + +```bash +# 连续调用 _migrate_orphaned_threads 两次 +# 第二次应无 thread 需要迁移(已有 user_id) +``` + +**预期:** 第二次 `migrated = 0`,无副作用 + +### 6.2 登录可重入 + +#### TC-REENT-04: 重复登录获取新 cookie + +```bash +# 同一用户连续登录 3 次 +for i in 1 2 3; do + curl -s -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@example.com&password=正确密码" \ + -c "cookies_$i.txt" -o /dev/null +done + +# 三个 cookie 都有效 +for i in 1 2 3; do + echo "Cookie $i: $(curl -s -w '%{http_code}' -o /dev/null $BASE/api/v1/auth/me -b cookies_$i.txt)" +done +``` + +**预期:** 三个 cookie 都返回 200(未改密码,token_version 相同,多 session 共存) + +#### TC-REENT-05: 登录-登出-登录 + +```bash +# 登录 +curl -s -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@example.com&password=正确密码" \ + -c cookies.txt -o /dev/null + +# 登出 +curl -s -X POST $BASE/api/v1/auth/logout -b cookies.txt -o /dev/null + +# 再次登录 +curl -s -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@example.com&password=正确密码" \ + -c cookies.txt + +curl -s -w "%{http_code}" $BASE/api/v1/auth/me -b cookies.txt +``` + +**预期:** 200。登出→再登录流程无状态残留。 + +### 6.3 改密码可重入 + +#### TC-REENT-06: 连续两次改密码 + +```bash +CSRF=$(grep csrf_token cookies.txt | awk '{print $NF}') + +# 第一次改密码 +curl -s -X POST $BASE/api/v1/auth/change-password \ + -b cookies.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: $CSRF" \ + -d '{"current_password":"Pass1","new_password":"Pass2"}' \ + -c cookies.txt + +# 用新 cookie 的 CSRF 再改一次 +CSRF=$(grep csrf_token cookies.txt | awk '{print $NF}') +curl -s -X POST $BASE/api/v1/auth/change-password \ + -b cookies.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: $CSRF" \ + -d '{"current_password":"Pass2","new_password":"Pass3"}' \ + -c cookies.txt + +curl -s -w "%{http_code}" $BASE/api/v1/auth/me -b cookies.txt +``` + +**预期:** +- [ ] 两次改密码都成功 +- [ ] 最终密码为 Pass3 +- [ ] `token_version` 递增两次(+2) +- [ ] 最新 cookie 有效 + +#### TC-REENT-07: 改密码后旧 cookie 全部失效 + +```bash +# 保存三个时间点的 cookie +# t1: 初始登录 → cookies_t1.txt +# t2: 第一次改密码后 → cookies_t2.txt +# t3: 第二次改密码后 → cookies_t3.txt + +# 用 t1 和 t2 的 cookie 访问 +curl -s -w "%{http_code}" $BASE/api/v1/auth/me -b cookies_t1.txt # 预期 401 +curl -s -w "%{http_code}" $BASE/api/v1/auth/me -b cookies_t2.txt # 预期 401 +curl -s -w "%{http_code}" $BASE/api/v1/auth/me -b cookies_t3.txt # 预期 200 +``` + +**预期:** 只有最新的 cookie 有效,历史 cookie 因 token_version 不匹配全部 401 + +### 6.4 注册可重入 + +#### TC-REENT-08: 同一邮箱并发注册 + +```bash +# 并发发送两个相同邮箱的注册请求 +curl -s -X POST $BASE/api/v1/auth/register \ + -H "Content-Type: application/json" \ + -d '{"email":"race@example.com","password":"RacePass1!"}' & +curl -s -X POST $BASE/api/v1/auth/register \ + -H "Content-Type: application/json" \ + -d '{"email":"race@example.com","password":"RacePass1!"}' & +wait + +# 检查用户数 +sqlite3 backend/.deer-flow/users.db \ + "SELECT COUNT(*) FROM users WHERE email='race@example.com';" +``` + +**预期:** +- [ ] 一个成功(201),一个失败(400 `email_already_exists`) +- [ ] 数据库中只有 1 条记录(UNIQUE 约束保护) + +### 6.5 Rate Limiter 可重入 + +#### TC-REENT-09: 限速过期后重新计数 + +```bash +# 触发锁定(5 次错误) +for i in $(seq 1 5); do + curl -s -o /dev/null -X POST $BASE/api/v1/auth/login/local \ + -d 
"username=admin@example.com&password=wrong" +done + +# 确认被锁定 +curl -s -w "%{http_code}" -o /dev/null -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@example.com&password=wrong" +# 预期: 429 + +# 等待锁定过期(5 分钟)或重启服务清除内存计数器 +make stop && make dev + +# 重新尝试 — 计数器应已重置 +curl -s -w "%{http_code}" -o /dev/null -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@example.com&password=wrong" +# 预期: 401(不是 429) +``` + +**预期:** 锁定过期后恢复正常限速(从 0 开始计数),而非累积 + +#### TC-REENT-10: 成功登录重置计数后再次失败 + +```bash +# 3 次失败 +for i in $(seq 1 3); do + curl -s -o /dev/null -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@example.com&password=wrong" +done + +# 1 次成功(重置计数) +curl -s -o /dev/null -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@example.com&password=正确密码" + +# 再 4 次失败(从 0 重新计数,未达阈值 5) +for i in $(seq 1 4); do + curl -s -w "attempt $i: %{http_code}\n" -o /dev/null -X POST $BASE/api/v1/auth/login/local \ + -d "username=admin@example.com&password=wrong" +done +``` + +**预期:** 4 次全部返回 401(未锁定),因为成功登录已重置计数器 + +### 6.6 CSRF Token 可重入 + +#### TC-REENT-11: 登录后多次 POST 使用同一 CSRF token + +```bash +CSRF=$(grep csrf_token cookies.txt | awk '{print $NF}') + +# 同一 CSRF token 多次使用 +for i in 1 2 3; do + echo "Request $i: $(curl -s -w '%{http_code}' -o /dev/null \ + -X POST $BASE/api/threads \ + -b cookies.txt \ + -H 'Content-Type: application/json' \ + -H "X-CSRF-Token: $CSRF" \ + -d '{"metadata":{}}')" +done +``` + +**预期:** 三次都成功(CSRF token 是 Double Submit Cookie,不是一次性 nonce) + +### 6.7 Thread 操作可重入 + +#### TC-REENT-12: 重复删除同一 Thread + +```bash +CSRF=$(grep csrf_token cookies.txt | awk '{print $NF}') + +# 创建 thread +TID=$(curl -s -X POST $BASE/api/threads \ + -b cookies.txt \ + -H "Content-Type: application/json" \ + -H "X-CSRF-Token: $CSRF" \ + -d '{"metadata":{}}' | jq -r .thread_id) + +# 第一次删除 +curl -s -w "%{http_code}" -X DELETE "$BASE/api/threads/$TID" \ + -b cookies.txt -H "X-CSRF-Token: $CSRF" +# 预期: 200 + +# 第二次删除(幂等) +curl -s -w "%{http_code}" -X DELETE "$BASE/api/threads/$TID" \ + -b cookies.txt -H "X-CSRF-Token: $CSRF" +``` + +**预期:** 第二次返回 200 或 404,不报 500 + +### 6.8 reset_admin 可重入 + +#### TC-REENT-13: 连续两次 reset_admin + +```bash +cd backend +python -m app.gateway.auth.reset_admin +# 记录密码 P1 + +python -m app.gateway.auth.reset_admin +# 记录密码 P2 +``` + +**预期:** +- [ ] P1 ≠ P2(每次生成新随机密码) +- [ ] P1 不可用,只有 P2 有效 +- [ ] `token_version` 递增了 2 +- [ ] `needs_setup` 为 True + +### 6.9 Setup 流程可重入 + +#### TC-REENT-14: 完成 Setup 后再访问 /setup 页面 + +1. 完成 admin setup(改邮箱 + 改密码) +2. 直接访问 `/setup` +3. **预期:** 应跳转到 `/workspace`(`needs_setup` 已为 false,SSR guard 不会返回 `needs_setup` tag) + +#### TC-REENT-15: Setup 中途刷新页面 + +1. 在 `/setup` 页面填写一半 +2. 刷新页面 +3. 
**预期:** 仍在 `/setup`(`needs_setup` 仍为 true),表单清空但不报错
+
+---
+
+## 七、模式差异测试
+
+> 以下用 `GW=http://localhost:8001` 表示直连 Gateway,`BASE=http://localhost:2026` 表示经 nginx。
+> Gateway 模式启动命令:`./scripts/serve.sh --dev --gateway`。
+
+### 7.1 标准模式独有
+
+> 启动命令:`make dev`(或 `./scripts/serve.sh --dev`)
+
+#### TC-MODE-01: LangGraph Server 独立运行,需 cookie
+
+```bash
+# 无 cookie 访问 LangGraph
+curl -s -w "%{http_code}" -o /dev/null $BASE/api/langgraph/threads/search
+# 预期: 403(LangGraph auth handler 拒绝)
+```
+
+#### TC-MODE-02: LangGraph auth 的 token_version 检查
+
+```bash
+# 登录拿 cookie
+curl -s -X POST $BASE/api/v1/auth/login/local \
+  -d "username=admin@example.com&password=正确密码" -c cookies.txt
+
+# 改密码(bumps token_version)
+CSRF=$(grep csrf_token cookies.txt | awk '{print $NF}')
+curl -s -X POST $BASE/api/v1/auth/change-password \
+  -b cookies.txt -H "Content-Type: application/json" -H "X-CSRF-Token: $CSRF" \
+  -d '{"current_password":"正确密码","new_password":"NewPass1!"}' -c new_cookies.txt
+
+# 用旧 cookie 访问 LangGraph
+curl -s -w "%{http_code}" $BASE/api/langgraph/threads/search -b cookies.txt
+# 预期: 403(token_version 不匹配)
+
+# 用新 cookie 访问
+CSRF2=$(grep csrf_token new_cookies.txt | awk '{print $NF}')
+curl -s -w "%{http_code}" -X POST $BASE/api/langgraph/threads/search \
+  -b new_cookies.txt -H "Content-Type: application/json" -H "X-CSRF-Token: $CSRF2" -d '{}'
+# 预期: 200
+```
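+
+> TC-MODE-02 验证的校验逻辑大致如下(假设性 Python 示意,`decode_jwt`、`db.get_user` 为虚构辅助函数,非实际代码):
+
+```python
+# 假设性示意:LangGraph auth handler 中的 token_version 校验
+async def authenticate(cookies: dict, db):
+    token = cookies.get("access_token")
+    if not token:
+        raise PermissionError("403: missing access_token cookie")
+    claims = decode_jwt(token)  # 签名错误 / 过期 → 同样拒绝
+    user = await db.get_user(claims["sub"])
+    if user is None or claims["ver"] != user.token_version:
+        # 改密码会递增 token_version,旧 token 携带的 ver 不再匹配
+        raise PermissionError("403: token_version mismatch")
+    return user.id  # 后续作为 owner filter 的过滤依据(见 TC-MODE-03)
+```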
+#### TC-MODE-03: LangGraph auth 的 owner filter 隔离
+
+```bash
+# user1 创建 thread
+curl -s -X POST $BASE/api/v1/auth/login/local \
+  -d "username=user1@example.com&password=UserPass1!" -c u1.txt
+CSRF1=$(grep csrf_token u1.txt | awk '{print $NF}')
+TID=$(curl -s -X POST $BASE/api/langgraph/threads \
+  -b u1.txt -H "Content-Type: application/json" -H "X-CSRF-Token: $CSRF1" \
+  -d '{"metadata":{}}' | python3 -c "import sys,json; print(json.load(sys.stdin)['thread_id'])")
+
+# user2 搜索 — 应看不到 user1 的 thread
+curl -s -X POST $BASE/api/v1/auth/login/local \
+  -d "username=user2@example.com&password=UserPass2!" -c u2.txt
+CSRF2=$(grep csrf_token u2.txt | awk '{print $NF}')
+curl -s -X POST $BASE/api/langgraph/threads/search \
+  -b u2.txt -H "Content-Type: application/json" -H "X-CSRF-Token: $CSRF2" -d '{}' | python3 -c "
+import sys,json
+threads = json.load(sys.stdin)
+ids = [t['thread_id'] for t in threads]
+assert '$TID' not in ids, 'LEAK: user2 can see user1 thread'
+print('OK: user2 sees', len(threads), 'threads, none belong to user1')
+"
+```
+
+### 7.2 Gateway 模式独有
+
+> 启动命令:`./scripts/serve.sh --dev --gateway`
+> 无 LangGraph Server 进程,agent runtime 嵌入 Gateway。
+
+#### TC-MODE-04: 所有请求经 AuthMiddleware
+
+```bash
+# 确认 LangGraph Server 未运行
+curl -s -w "%{http_code}" -o /dev/null http://localhost:2024/ok
+# 预期: 000(连接被拒)
+
+# Gateway API 受保护
+curl -s -w "%{http_code}" -o /dev/null $BASE/api/models
+# 预期: 401
+
+# LangGraph 兼容路由(rewrite 到 Gateway)也受保护
+curl -s -w "%{http_code}" -o /dev/null -X POST $BASE/api/langgraph/threads/search \
+  -H "Content-Type: application/json" -d '{}'
+# 预期: 401
+```
+
+#### TC-MODE-05: Gateway 模式下完整 auth 流程
+
+```bash
+# 登录
+curl -s -X POST $BASE/api/v1/auth/login/local \
+  -d "username=admin@example.com&password=正确密码" -c cookies.txt
+
+CSRF=$(grep csrf_token cookies.txt | awk '{print $NF}')
+
+# 创建 thread(走 Gateway 内嵌 runtime)
+curl -s -X POST $BASE/api/langgraph/threads \
+  -b cookies.txt -H "Content-Type: application/json" -H "X-CSRF-Token: $CSRF" \
+  -d '{"metadata":{}}' | python3 -c "import sys,json; print(json.load(sys.stdin)['thread_id'])"
+# 预期: 返回 thread_id
+
+# CSRF 保护(Gateway 模式下 CSRFMiddleware 直接覆盖所有路由)
+curl -s -w "%{http_code}" -o /dev/null -X POST $BASE/api/langgraph/threads \
+  -b cookies.txt -H "Content-Type: application/json" -d '{"metadata":{}}'
+# 预期: 403(CSRF token missing)
+```
+
+### 7.3 直连 Gateway(无 nginx)
+
+> 启动命令:`cd backend && make gateway`(端口 8001)
+> 不经过 nginx,直接测试 Gateway 的 auth 层。
+
+#### TC-GW-01: AuthMiddleware 保护所有非 public 路由
+
+```bash
+GW=http://localhost:8001
+
+for path in /api/models /api/mcp/config /api/memory /api/skills \
+    /api/v1/auth/me /api/v1/auth/change-password; do
+  echo "$path: $(curl -s -w '%{http_code}' -o /dev/null $GW$path)"
+done
+# 预期: 全部 401
+```
+
+#### TC-GW-02: Public 路由不需要 cookie
+
+```bash
+GW=http://localhost:8001
+
+for path in /health /api/v1/auth/setup-status /api/v1/auth/login/local /api/v1/auth/register; do
+  echo "$path: $(curl -s -w '%{http_code}' -o /dev/null $GW$path)"
+done
+# 预期: 200 或 405/422(方法不对但不是 401)
+```
+
+#### TC-GW-03: 直连 Gateway 注册 + 登录 + CSRF 完整流程
+
+```bash
+GW=http://localhost:8001
+
+# 注册
+curl -s -X POST $GW/api/v1/auth/register \
+  -H "Content-Type: application/json" \
+  -d '{"email":"gwtest@example.com","password":"GwTest123!"}' \
+  -c gw_cookies.txt -w "\nHTTP %{http_code}"
+# 预期: 201
+
+# 登录
+curl -s -X POST $GW/api/v1/auth/login/local \
+  -d "username=gwtest@example.com&password=GwTest123!"
\ + -c gw_cookies.txt -w "\nHTTP %{http_code}" +# 预期: 200 + +# GET(不需要 CSRF) +curl -s -w "%{http_code}" $GW/api/models -b gw_cookies.txt +# 预期: 200 + +# POST 无 CSRF +curl -s -w "%{http_code}" -o /dev/null -X POST $GW/api/memory/reload -b gw_cookies.txt +# 预期: 403(CSRF token missing) + +# POST 有 CSRF +CSRF=$(grep csrf_token gw_cookies.txt | awk '{print $NF}') +curl -s -w "%{http_code}" -o /dev/null -X POST $GW/api/memory/reload \ + -b gw_cookies.txt -H "X-CSRF-Token: $CSRF" +# 预期: 200 +``` + +#### TC-GW-04: 直连 Gateway 的 Rate Limiter + +```bash +GW=http://localhost:8001 + +# 直连时 request.client.host 是真实 IP(无 nginx 代理),不读 X-Real-IP +for i in $(seq 1 6); do + echo -n "attempt $i: " + curl -s -w "%{http_code}\n" -o /dev/null -X POST $GW/api/v1/auth/login/local \ + -d "username=admin@example.com&password=wrong" +done +# 预期: 前 5 次 401,第 6 次 429 +``` + +#### TC-GW-05: 直连 Gateway 不受 X-Real-IP 欺骗 + +```bash +GW=http://localhost:8001 + +# 直连时 client.host 不是 trusted proxy,X-Real-IP 被忽略 +for i in $(seq 1 6); do + echo -n "attempt $i (X-Real-IP spoofed): " + curl -s -w "%{http_code}\n" -o /dev/null -X POST $GW/api/v1/auth/login/local \ + -H "X-Real-IP: 10.0.0.$i" \ + -d "username=admin@example.com&password=wrong" +done +# 预期: 前 5 次 401,第 6 次 429(伪造的 X-Real-IP 无效,所有请求共享真实 IP 的桶) +``` + +### 7.4 Docker 部署 + +> 启动命令:`./scripts/deploy.sh`(标准)或 `./scripts/deploy.sh --gateway`(Gateway 模式) +> Docker Compose 文件:`docker/docker-compose.yaml` +> +> 前置条件: +> - `.env` 中设置 `AUTH_JWT_SECRET`(否则每次容器重启 session 全部失效) +> - `DEER_FLOW_HOME` 挂载到宿主机目录(持久化 `users.db`) + +#### TC-DOCKER-01: users.db 通过 volume 持久化 + +```bash +# 启动容器 +./scripts/deploy.sh + +# 等待启动完成 +sleep 15 +BASE=http://localhost:2026 + +# 注册用户 +curl -s -X POST $BASE/api/v1/auth/register \ + -H "Content-Type: application/json" \ + -d '{"email":"docker-test@example.com","password":"DockerTest1!"}' -w "\nHTTP %{http_code}" + +# 检查宿主机上的 users.db +ls -la ${DEER_FLOW_HOME:-backend/.deer-flow}/users.db +sqlite3 ${DEER_FLOW_HOME:-backend/.deer-flow}/users.db \ + "SELECT email FROM users WHERE email='docker-test@example.com';" +``` + +**预期:** users.db 在宿主机 `DEER_FLOW_HOME` 目录中,查询可见刚注册的用户。 + +#### TC-DOCKER-02: 重启容器后 session 保持 + +```bash +# 登录拿 cookie +curl -s -X POST $BASE/api/v1/auth/login/local \ + -d "username=docker-test@example.com&password=DockerTest1!" 
\
+  -c docker_cookies.txt -o /dev/null
+
+# 验证 cookie 有效
+curl -s -w "%{http_code}" -o /dev/null $BASE/api/v1/auth/me -b docker_cookies.txt
+# 预期: 200
+
+# 重启容器(不删 volume)
+./scripts/deploy.sh down && ./scripts/deploy.sh
+sleep 15
+
+# 用旧 cookie 访问
+curl -s -w "%{http_code}" -o /dev/null $BASE/api/v1/auth/me -b docker_cookies.txt
+```
+
+**预期:**
+- 有 `AUTH_JWT_SECRET` → 200(session 保持)
+- 无 `AUTH_JWT_SECRET` → 401(每次启动生成新临时密钥,旧 JWT 签名失效)
+
+#### TC-DOCKER-03: 多 Worker 下 Rate Limiter 独立
+
+```bash
+# docker-compose.yaml 中 gateway 默认 4 workers
+# 每个 worker 有独立的 _login_attempts dict
+# 限速可能不精确(请求分散到不同 worker),但不会完全失效
+
+for i in $(seq 1 20); do
+  echo -n "attempt $i: "
+  curl -s -w "%{http_code}\n" -o /dev/null -X POST $BASE/api/v1/auth/login/local \
+    -d "username=docker-test@example.com&password=wrong"
+done
+```
+
+**预期:** 在某个点开始返回 429(每个 worker 独立计数,阈值可能在 5~20 之间触发,取决于负载均衡分布)。
+
+**已知限制:** In-process rate limiter 不跨 worker 共享。生产环境如需精确限速,需要 Redis 等外部存储。
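+
+> "每个 worker 独立计数"之所以让触发点不确定,是因为 limiter 活在各 worker 自己的进程内存里。下面是一个假设性的极简示意(字段名与阈值均为虚构,非实际代码):
+
+```python
+# 假设性示意:进程内登录限速器,每个 worker 各持有一份
+import time
+from collections import defaultdict
+
+_login_attempts: dict[str, list[float]] = defaultdict(list)  # key: 客户端 IP
+WINDOW_SECONDS = 300  # 假设的 5 分钟窗口
+MAX_FAILURES = 5      # 假设的锁定阈值
+
+
+def is_locked(ip: str) -> bool:
+    now = time.monotonic()
+    # 只保留窗口内的失败记录:窗口滑过即自动解锁
+    _login_attempts[ip] = [t for t in _login_attempts[ip] if now - t < WINDOW_SECONDS]
+    return len(_login_attempts[ip]) >= MAX_FAILURES
+
+
+def record_failure(ip: str) -> None:
+    _login_attempts[ip].append(time.monotonic())
+
+
+def record_success(ip: str) -> None:
+    _login_attempts[ip].clear()  # 成功登录重置计数(对应 TC-REENT-10)
+```
+
+> 4 个 worker 各有一份 `_login_attempts`,请求被负载均衡摊开后,单个 worker 达到阈值 5 的时机不定,因此整体要到第 5~20 次之间才会看到 429。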
+#### TC-DOCKER-04: IM 渠道不经过 auth
+
+```bash
+# IM 渠道(Feishu/Slack/Telegram)在 gateway 容器内部通过 LangGraph SDK 通信
+# 不走 nginx,不经过 AuthMiddleware
+
+# 验证方式:检查 gateway 日志中 channel manager 的请求不包含 auth 错误
+docker logs deer-flow-gateway 2>&1 | grep -E "ChannelManager|channel" | head -10
+```
+
+**预期:** 无 auth 相关错误。渠道通过 `langgraph-sdk` 直连 LangGraph Server(`http://langgraph:2024`),不走 auth 层。
+
+#### TC-DOCKER-05: admin 密码写入 0600 凭证文件(不再走日志)
+
+```bash
+# 凭证文件写在挂载到宿主机的 DEER_FLOW_HOME 下
+ls -la ${DEER_FLOW_HOME:-backend/.deer-flow}/admin_initial_credentials.txt
+# 预期文件权限: -rw------- (0600)
+
+cat ${DEER_FLOW_HOME:-backend/.deer-flow}/admin_initial_credentials.txt
+# 预期内容: email + password 行
+
+# 容器日志只输出文件路径,不输出密码本身
+docker logs deer-flow-gateway 2>&1 | grep -E "Credentials written to|Admin account"
+# 预期看到: "Credentials written to: /...../admin_initial_credentials.txt (mode 0600)"
+
+# 反向验证: 日志里 NEVER 出现明文密码
+docker logs deer-flow-gateway 2>&1 | grep -iE "Password: .{15,}" && echo "FAIL: leaked" || echo "OK: not leaked"
+```
+
+**预期:**
+- 凭证文件存在于 `DEER_FLOW_HOME` 下,权限 `0600`
+- 容器日志输出**路径**(不是密码本身),符合 CodeQL `py/clear-text-logging-sensitive-data` 规则
+- `grep "Password:"` 在日志中**应当无匹配**(旧行为已废弃,simplify pass 移除了日志泄露路径)
+
+#### TC-DOCKER-06: Gateway 模式 Docker 部署
+
+```bash
+# Gateway 模式:无 langgraph 容器
+./scripts/deploy.sh --gateway
+sleep 15
+
+# 确认 langgraph 容器不存在
+docker ps --filter name=deer-flow-langgraph --format '{{.Names}}' | wc -l
+# 预期: 0
+
+# auth 流程正常
+curl -s -w "%{http_code}" -o /dev/null $BASE/api/models
+# 预期: 401
+
+curl -s -X POST $BASE/api/v1/auth/login/local \
+  -d "username=admin@deerflow.dev&password=<日志密码>" \
+  -c cookies.txt -w "\nHTTP %{http_code}"
+# 预期: 200
+```
+
+### 7.5 补充边界用例
+
+#### TC-EDGE-01: 格式正确但随机 JWT
+
+```bash
+RANDOM_JWT=$(python3 -c "
+import jwt, time, uuid
+print(jwt.encode({'sub':str(uuid.uuid4()),'ver':0,'exp':int(time.time())+3600}, 'wrong-secret-32chars-placeholder!!', algorithm='HS256'))
+")
+curl -s --cookie "access_token=$RANDOM_JWT" $BASE/api/v1/auth/me | jq .detail
+```
+
+**预期:** `{"code": "token_invalid", "message": "Token error: invalid_signature"}`
+
+#### TC-EDGE-02: 注册时传 system_role=admin
+
+```bash
+curl -s -X POST $BASE/api/v1/auth/register \
+  -H "Content-Type: application/json" \
+  -d '{"email":"hacker@example.com","password":"HackPass1!","system_role":"admin"}' | jq .system_role
+```
+
+**预期:** `"user"`(`system_role` 字段被忽略)
+
+#### TC-EDGE-03: 并发改密码
+
+```bash
+# 注册用户,登录两个 session
+curl -s -X POST $BASE/api/v1/auth/register \
+  -H "Content-Type: application/json" \
+  -d '{"email":"edge03@example.com","password":"EdgePass3!"}' -o /dev/null
+curl -s -X POST $BASE/api/v1/auth/login/local \
+  -d "username=edge03@example.com&password=EdgePass3!" -c s1.txt -o /dev/null
+curl -s -X POST $BASE/api/v1/auth/login/local \
+  -d "username=edge03@example.com&password=EdgePass3!" -c s2.txt -o /dev/null
+
+CSRF1=$(grep csrf_token s1.txt | awk '{print $NF}')
+CSRF2=$(grep csrf_token s2.txt | awk '{print $NF}')
+
+# 并发改密码
+curl -s -w "S1: %{http_code}\n" -o /dev/null -X POST $BASE/api/v1/auth/change-password \
+  -b s1.txt -H "Content-Type: application/json" -H "X-CSRF-Token: $CSRF1" \
+  -d '{"current_password":"EdgePass3!","new_password":"NewEdge3a!"}' &
+curl -s -w "S2: %{http_code}\n" -o /dev/null -X POST $BASE/api/v1/auth/change-password \
+  -b s2.txt -H "Content-Type: application/json" -H "X-CSRF-Token: $CSRF2" \
+  -d '{"current_password":"EdgePass3!","new_password":"NewEdge3b!"}' &
+wait
+```
+
+**预期:** 一个 200、一个 400(current_password 已变导致验证失败)。极端并发下可能两个都 200(SQLite 串行写),但最终只有一个密码生效。
+
+#### TC-EDGE-04: Cookie SameSite 验证
+
+> 完整的 HTTP/HTTPS cookie 属性对比见 §3.3 TC-ATK-06/07/07a。
+
+```bash
+curl -s -D - -X POST $BASE/api/v1/auth/login/local \
+  -d "username=admin@example.com&password=正确密码" 2>/dev/null | grep -i set-cookie
+```
+
+**预期:** `access_token` → `SameSite=lax`,`csrf_token` → `SameSite=strict`
+
+#### TC-EDGE-05: HTTP 无 max_age / HTTPS 有 max_age
+
+```bash
+# HTTP
+curl -s -D - -X POST $BASE/api/v1/auth/login/local \
+  -d "username=admin@example.com&password=正确密码" 2>/dev/null \
+  | grep "access_token=" | grep -oi "max-age=[0-9]*" || echo "NO max-age (HTTP session cookie)"
+
+# HTTPS
+curl -s -D - -X POST $BASE/api/v1/auth/login/local \
+  -H "X-Forwarded-Proto: https" \
+  -d "username=admin@example.com&password=正确密码" 2>/dev/null \
+  | grep "access_token=" | grep -oi "max-age=[0-9]*"
+```
+
+**预期:** HTTP 无 `Max-Age`(session cookie,浏览器关闭即失效),HTTPS 有 `Max-Age=604800`(7 天)
+
+#### TC-EDGE-06: public 路径 trailing slash
+
+```bash
+for path in /api/v1/auth/login/local/ /api/v1/auth/register/ \
+    /api/v1/auth/logout/ /api/v1/auth/setup-status/; do
+  echo "$path: $(curl -s -w '%{http_code}' -o /dev/null $BASE$path)"
+done
+```
+
+**预期:** 全部 307(redirect 去掉 trailing slash)或 200/405,不是 401
+
+### 7.6 红队对抗测试
+
+> 模拟攻击者视角,验证防线没有可利用的缝隙。
+
+#### 7.6.1 路径混淆绕过
+
+```bash
+# 通过编码/双斜杠/路径穿越尝试绕过 AuthMiddleware 公开路径判断
+for path in \
+    "//api/v1/auth/me" \
+    "/api/v1/auth/login/local/../me" \
+    "/api/v1/auth/login/local%2f..%2fme" \
+    "/api/v1/auth/login/local/..%2Fme" \
+    "/API/V1/AUTH/ME"; do
+  echo "$path: $(curl -s -w '%{http_code}' -o /dev/null $BASE$path)"
+done
+```
+
+**预期:** 全部 401 或 404。不应有路径混淆导致跳过 auth 检查。
+
+#### 7.6.2 CSRF 对抗矩阵
+
+```bash
+# 登录拿 cookie
+curl -s -X POST $BASE/api/v1/auth/login/local \
+  -d "username=admin@example.com&password=正确密码" -c cookies.txt
+
+CSRF=$(grep csrf_token cookies.txt | awk '{print $NF}')
+
+# Case 1: 有 cookie 无 header → 403
+curl -s -w "%{http_code}" -o /dev/null \
+  -X POST $BASE/api/threads -b cookies.txt \
+  -H "Content-Type: application/json" -d '{"metadata":{}}'
+
+# Case 2: 有 header 无 cookie → 403(先从 cookie jar 删掉 csrf_token 再请求)
+grep -v csrf_token cookies.txt > cookies_nocsrf.txt
+curl -s -w "%{http_code}" -o /dev/null \
+  -X POST $BASE/api/threads \
+  -b cookies_nocsrf.txt \
+  -H "X-CSRF-Token: $CSRF" \
+  -H "Content-Type: application/json" -d '{"metadata":{}}'
+
+# Case 3: header 和 cookie 不匹配 → 403
+curl -s -w "%{http_code}" -o /dev/null \
+  -X POST $BASE/api/threads -b cookies.txt \
+  -H "X-CSRF-Token: wrong-token" \
+  -H "Content-Type: application/json" -d '{"metadata":{}}'
+
+# Case 4: 旧 CSRF token(登出再登录后) → 旧 token 应失效
+curl -s -X POST $BASE/api/v1/auth/logout -b cookies.txt
+curl -s -X POST $BASE/api/v1/auth/login/local \
+  -d "username=admin@example.com&password=正确密码" -c cookies.txt
+# 用旧 CSRF 发请求
+curl -s -w "%{http_code}" -o /dev/null \
+  -X POST $BASE/api/threads -b cookies.txt \
+  -H "X-CSRF-Token: $CSRF" \
+  -H "Content-Type: application/json" -d '{"metadata":{}}'
+```
+
+**预期:** Case 1-3 全部 403。Case 4 应 403(旧 CSRF 与新 cookie 不匹配)。
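+
+> Double Submit Cookie 的判定核心可以概括为几行(假设性 Python 示意,非实际代码),Case 1-4 的结果都由它决定:
+
+```python
+# 假设性示意:CSRF Double Submit 校验
+import hmac
+
+
+def check_csrf(cookies: dict, headers: dict) -> bool:
+    cookie_token = cookies.get("csrf_token")    # 缺 cookie → Case 2 拒绝
+    header_token = headers.get("X-CSRF-Token")  # 缺 header → Case 1 拒绝
+    if not cookie_token or not header_token:
+        return False
+    # 两者必须严格相等(Case 3/4);常数时间比较避免时序侧信道
+    return hmac.compare_digest(cookie_token, header_token)
+```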
+#### 7.6.3 Token Replay(登出后旧 token 重放)
+
+```bash
+# 登录,保存 cookie
+curl -s -X POST $BASE/api/v1/auth/login/local \
+  -d "username=admin@example.com&password=正确密码" -c cookies.txt
+
+# 提取 access_token 值
+TOKEN=$(grep access_token cookies.txt | awk '{print $NF}')
+
+# 登出
+curl -s -X POST $BASE/api/v1/auth/logout -b cookies.txt
+
+# 手工注入旧 token(模拟攻击者窃取了 token)
+curl -s -w "%{http_code}" -o /dev/null \
+  $BASE/api/v1/auth/me --cookie "access_token=$TOKEN"
+```
+
+**预期:** 200(已知限制:登出只清客户端 cookie,不 bump `token_version`。旧 token 在过期前仍有效)。
+**安全备注:** 如需严格防重放,需在登出时 `token_version += 1`。当前设计选择不做,因为成本是所有设备的 session 全部失效。
+
+#### 7.6.4 跨站强制登出
+
+```bash
+# 攻击者从第三方站点 POST /logout(无需认证、无需 CSRF)
+curl -s -X POST $BASE/api/v1/auth/logout -w "%{http_code}"
+```
+
+**预期:** 200(logout 是 public + CSRF 豁免)。
+**风险评估:** 低——只影响可用性(被强制登出),不泄露数据。浏览器 `SameSite=Lax` 限制了真实跨站场景下 cookie 不会被带上,所以实际上第三方站点的 POST 不会清除用户 cookie。
+
+#### 7.6.5 Metadata 注入攻击(所有权伪造)
+
+```bash
+# 尝试在创建 thread 时注入其他用户的 user_id
+CSRF=$(grep csrf_token cookies.txt | awk '{print $NF}')
+curl -s -X POST $BASE/api/threads \
+  -b cookies.txt \
+  -H "Content-Type: application/json" \
+  -H "X-CSRF-Token: $CSRF" \
+  -d '{"metadata":{"owner_id":"victim-user-id"}}' | jq .metadata.owner_id
+```
+
+**预期:** 返回的 `metadata.owner_id` 应为当前登录用户的 ID,不是请求中注入的 `victim-user-id`。服务端应覆盖客户端提供的 `user_id`。
+
+#### 7.6.6 HTTP Method 探测
+
+```bash
+# HEAD/OPTIONS 不应泄露受保护资源信息
+for method in HEAD OPTIONS TRACE; do
+  echo "$method /api/models: $(curl -s -w '%{http_code}' -o /dev/null -X $method $BASE/api/models)"
+done
+```
+
+**预期:** HEAD/OPTIONS 返回 401 或 405。TRACE 应返回 405。
+
+#### 7.6.7 Rate Limiter IP 维度缺陷验证
+
+```bash
+# 通过不同的 X-Forwarded-For 绕过限速(验证是否用 client.host 而非 header)
+for i in $(seq 1 6); do
+  curl -s -w "attempt $i: %{http_code}\n" -o /dev/null \
+    -X POST $BASE/api/v1/auth/login/local \
+    -H "X-Forwarded-For: 10.0.0.$i" \
+    -d "username=admin@example.com&password=wrong"
+done
+```
+
+**预期:** 如果 rate limiter 基于 `request.client.host`(实际 TCP 连接 IP),所有请求来自同一 IP,第 6 个应返回 429。X-Forwarded-For 不应影响限速判断。
+
+#### 7.6.8 Junk Cookie 穿透验证
+
+```bash
+# middleware 只检查 cookie 存在性,不验证 JWT
+# 确认 junk cookie 能过 middleware 但被下游 @require_auth 拦截
+curl -s -w "%{http_code}" $BASE/api/v1/auth/me \
+  --cookie "access_token=not-a-jwt"
+```
+
+**预期:** 401(middleware 放行,`get_current_user_from_request` 解码失败返回 401)。
+**安全备注:** middleware 是 presence-only 检查,有意设计。完整验证交给 `@require_auth`。
+
+#### 7.6.9 路由覆盖审计
+
+```bash
+# 列出所有注册的路由,检查哪些没有 @require_auth
+cd backend && PYTHONPATH=. python3 -c "
+from app.gateway.app import create_app
+app = create_app()
+public_prefixes = ['/health', '/docs', '/redoc', '/openapi.json',
+                   '/api/v1/auth/login', '/api/v1/auth/register',
+                   '/api/v1/auth/logout', '/api/v1/auth/setup-status']
+for route in app.routes:
+    path = getattr(route, 'path', '')
+    if not path or not path.startswith('/api'):
+        continue
+    is_public = any(path.startswith(p) for p in public_prefixes)
+    if not is_public:
+        print(f'  {path}')
+" 2>/dev/null
+```
+
+**预期:** 列出的所有路由都应由 AuthMiddleware(cookie 存在性)+ `@require_auth`/`@require_permission`(JWT 验证)双层保护。检查是否有遗漏。
+
+---
+
+## 八、回归清单
+
+每次 auth 相关代码变更后必须通过:
+
+```bash
+# 单元测试(168 个)
+cd backend && PYTHONPATH=. uv run pytest \
+  tests/test_auth.py \
+  tests/test_auth_config.py \
+  tests/test_auth_errors.py \
+  tests/test_auth_type_system.py \
+  tests/test_auth_middleware.py \
+  tests/test_langgraph_auth.py \
+  -v
+
+# 核心接口冒烟
+curl -s $BASE/health                            # 200
+curl -s $BASE/api/models                        # 401 (无 cookie)
+curl -s $BASE/api/v1/auth/setup-status          # 200
+curl -s $BASE/api/v1/auth/me -b cookies.txt     # 200 (有 cookie)
+```
diff --git a/backend/docs/AUTH_UPGRADE.md b/backend/docs/AUTH_UPGRADE.md
new file mode 100644
index 000000000..344c488c4
--- /dev/null
+++ b/backend/docs/AUTH_UPGRADE.md
@@ -0,0 +1,129 @@
+# Authentication Upgrade Guide
+
+DeerFlow 内置了认证模块。本文档面向从无认证版本升级的用户。
+
+## 核心概念
+
+认证模块采用**始终强制**策略:
+
+- 首次启动时自动创建 admin 账号,随机密码打印到控制台日志
+- 认证从一开始就是强制的,无竞争窗口
+- 历史对话(升级前创建的 thread)自动迁移到 admin 名下
+
+## 升级步骤
+
+### 1. 更新代码
+
+```bash
+git pull origin main
+cd backend && make install
+```
+
+### 2. 首次启动
+
+```bash
+make dev
+```
+
+控制台会输出:
+
+```
+============================================================
+  Admin account created on first boot
+  Email:    admin@deerflow.dev
+  Password: aB3xK9mN_pQ7rT2w
+  Change it after login: Settings → Account
+============================================================
+```
+
+如果未登录就重启了服务,不用担心——只要 setup 未完成,每次启动都会重置密码并重新打印到控制台。
+
+### 3. 登录
+
+访问 `http://localhost:2026/login`,使用控制台输出的邮箱和密码登录。
+
+### 4. 修改密码
+
+登录后进入 Settings → Account → Change Password。
+
+### 5. 添加用户(可选)
+
+其他用户通过 `/login` 页面注册,自动获得 **user** 角色。每个用户只能看到自己的对话。
+
+## 安全机制
+
+| 机制 | 说明 |
+|------|------|
+| JWT HttpOnly Cookie | Token 不暴露给 JavaScript,防止 XSS 窃取 |
+| CSRF Double Submit Cookie | 所有 POST/PUT/DELETE 请求需携带 `X-CSRF-Token` |
+| bcrypt 密码哈希 | 密码不以明文存储 |
+| 多租户隔离 | 用户只能访问自己的 thread |
+| HTTPS 自适应 | 检测 `x-forwarded-proto`,自动设置 `Secure` cookie 标志 |
+
+## 常见操作
+
+### 忘记密码
+
+```bash
+cd backend
+
+# 重置 admin 密码
+python -m app.gateway.auth.reset_admin
+
+# 重置指定用户密码
+python -m app.gateway.auth.reset_admin --email user@example.com
+```
+
+会输出新的随机密码。
+
+### 完全重置
+
+删除用户数据库,重启后自动创建新 admin:
+
+```bash
+rm -f backend/.deer-flow/users.db
+# 重启服务,控制台输出新密码
+```
+
+## 数据存储
+
+| 文件 | 内容 |
+|------|------|
+| `.deer-flow/users.db` | SQLite 用户数据库(密码哈希、角色) |
+| `.env` 中的 `AUTH_JWT_SECRET` | JWT 签名密钥(未设置时自动生成临时密钥,重启后 session 失效) |
+
+### 生产环境建议
+
+```bash
+# 生成持久化 JWT 密钥,避免重启后所有用户需重新登录
+python -c "import secrets; print(secrets.token_urlsafe(32))"
+# 将输出添加到 .env:
+# AUTH_JWT_SECRET=<生成的密钥>
+```
+
+## API 端点
+
+| 端点 | 方法 | 说明 |
+|------|------|------|
+| `/api/v1/auth/login/local` | POST | 邮箱密码登录(OAuth2 form) |
+| `/api/v1/auth/register` | POST | 注册新用户(user 角色) |
+| `/api/v1/auth/logout` | POST | 登出(清除 cookie) |
+| `/api/v1/auth/me` | GET | 获取当前用户信息 |
+| `/api/v1/auth/change-password` | POST | 修改密码 |
+| `/api/v1/auth/setup-status` | GET | 检查 admin 是否存在 |
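+
+作为参考,下面给出一个假设性的命令行调用示例(cookie 文件名与 CSRF 提取方式仅作演示),串起登录 → 携带 CSRF 调用写接口的完整流程:
+
+```bash
+# 登录,保存 HttpOnly cookie
+curl -s -X POST http://localhost:2026/api/v1/auth/login/local \
+  -d "username=admin@deerflow.dev&password=<控制台密码>" \
+  -c cookies.txt
+
+# 读接口只需 cookie
+curl -s http://localhost:2026/api/v1/auth/me -b cookies.txt
+
+# 写接口(POST/PUT/DELETE)还需携带与 csrf_token cookie 同值的 header
+CSRF=$(grep csrf_token cookies.txt | awk '{print $NF}')
+curl -s -X POST http://localhost:2026/api/v1/auth/change-password \
+  -b cookies.txt \
+  -H "Content-Type: application/json" \
+  -H "X-CSRF-Token: $CSRF" \
+  -d '{"current_password":"<控制台密码>","new_password":"<新密码>"}'
+```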
+## 兼容性
+
+- **标准模式**(`make dev`):完全兼容,admin 自动创建
+- **Gateway 模式**(`./scripts/serve.sh --dev --gateway`):完全兼容
+- **Docker 部署**:完全兼容,`.deer-flow/users.db` 需持久化卷挂载
+- **IM 渠道**(Feishu/Slack/Telegram):通过 LangGraph SDK 通信,不经过认证层
+- **DeerFlowClient**(嵌入式):不经过 HTTP,不受认证影响
+
+## 故障排查
+
+| 症状 | 原因 | 解决 |
+|------|------|------|
+| 启动后没看到密码 | admin 已存在(非首次启动) | 用 `reset_admin` 重置,或删 `users.db` |
+| 登录后 POST 返回 403 | CSRF token 缺失 | 确认前端已更新 |
+| 重启后需要重新登录 | `AUTH_JWT_SECRET` 未持久化 | 在 `.env` 中设置固定密钥 |
diff --git a/backend/docs/CONFIGURATION.md b/backend/docs/CONFIGURATION.md
index 701c0278e..f87fdd236 100644
--- a/backend/docs/CONFIGURATION.md
+++ b/backend/docs/CONFIGURATION.md
@@ -259,6 +259,8 @@ sandbox:
 When you configure `sandbox.mounts`, DeerFlow exposes those `container_path` values in the agent prompt so the agent can discover and operate on mounted directories directly instead of assuming everything must live under `/mnt/user-data`.
 
+For bare-metal Docker sandbox runs that use localhost, DeerFlow binds the sandbox HTTP port to `127.0.0.1` by default so it is not exposed on every host interface. Docker-outside-of-Docker deployments that connect through `host.docker.internal` keep the broad legacy bind for compatibility. Set `DEER_FLOW_SANDBOX_BIND_HOST` explicitly if your deployment needs a different bind address.
+
 ### Skills
 
 Configure the skills directory for specialized workflows:
@@ -320,6 +322,7 @@ models:
 - `NOVITA_API_KEY` - Novita API key (OpenAI-compatible endpoint)
 - `TAVILY_API_KEY` - Tavily search API key
 - `DEER_FLOW_CONFIG_PATH` - Custom config file path
+- `GATEWAY_ENABLE_DOCS` - Set to `false` to disable Swagger UI (`/docs`), ReDoc (`/redoc`), and OpenAPI schema (`/openapi.json`) endpoints (default: `true`)
 
 ## Configuration Location
diff --git a/backend/docs/HARNESS_APP_SPLIT.md b/backend/docs/HARNESS_APP_SPLIT.md
deleted file mode 100644
index cf0e26e51..000000000
--- a/backend/docs/HARNESS_APP_SPLIT.md
+++ /dev/null
@@ -1,343 +0,0 @@
-# DeerFlow 后端拆分设计文档:Harness + App
-
-> 状态:Draft
-> 作者:DeerFlow Team
-> 日期:2026-03-13
-
-## 1.
背景与动机 - -DeerFlow 后端当前是一个单一 Python 包(`src.*`),包含了从底层 agent 编排到上层用户产品的所有代码。随着项目发展,这种结构带来了几个问题: - -- **复用困难**:其他产品(CLI 工具、Slack bot、第三方集成)想用 agent 能力,必须依赖整个后端,包括 FastAPI、IM SDK 等不需要的依赖 -- **职责模糊**:agent 编排逻辑和用户产品逻辑混在同一个 `src/` 下,边界不清晰 -- **依赖膨胀**:LangGraph Server 运行时不需要 FastAPI/uvicorn/Slack SDK,但当前必须安装全部依赖 - -本文档提出将后端拆分为两部分:**deerflow-harness**(可发布的 agent 框架包)和 **app**(不打包的用户产品代码)。 - -## 2. 核心概念 - -### 2.1 Harness(线束/框架层) - -Harness 是 agent 的构建与编排框架,回答 **"如何构建和运行 agent"** 的问题: - -- Agent 工厂与生命周期管理 -- Middleware pipeline -- 工具系统(内置工具 + MCP + 社区工具) -- 沙箱执行环境 -- 子 agent 委派 -- 记忆系统 -- 技能加载与注入 -- 模型工厂 -- 配置系统 - -**Harness 是一个可发布的 Python 包**(`deerflow-harness`),可以独立安装和使用。 - -**Harness 的设计原则**:对上层应用完全无感知。它不知道也不关心谁在调用它——可以是 Web App、CLI、Slack Bot、或者一个单元测试。 - -### 2.2 App(应用层) - -App 是面向用户的产品代码,回答 **"如何将 agent 呈现给用户"** 的问题: - -- Gateway API(FastAPI REST 接口) -- IM Channels(飞书、Slack、Telegram 集成) -- Custom Agent 的 CRUD 管理 -- 文件上传/下载的 HTTP 接口 - -**App 不打包、不发布**,它是 DeerFlow 项目内部的应用代码,直接运行。 - -**App 依赖 Harness,但 Harness 不依赖 App。** - -### 2.3 边界划分 - -| 模块 | 归属 | 说明 | -|------|------|------| -| `config/` | Harness | 配置系统是基础设施 | -| `reflection/` | Harness | 动态模块加载工具 | -| `utils/` | Harness | 通用工具函数 | -| `agents/` | Harness | Agent 工厂、middleware、state、memory | -| `subagents/` | Harness | 子 agent 委派系统 | -| `sandbox/` | Harness | 沙箱执行环境 | -| `tools/` | Harness | 工具注册与发现 | -| `mcp/` | Harness | MCP 协议集成 | -| `skills/` | Harness | 技能加载、解析、定义 schema | -| `models/` | Harness | LLM 模型工厂 | -| `community/` | Harness | 社区工具(tavily、jina 等) | -| `client.py` | Harness | 嵌入式 Python 客户端 | -| `gateway/` | App | FastAPI REST API | -| `channels/` | App | IM 平台集成 | - -**关于 Custom Agents**:agent 定义格式(`config.yaml` + `SOUL.md` schema)由 Harness 层的 `config/agents_config.py` 定义,但文件的存储、CRUD、发现机制由 App 层的 `gateway/routers/agents.py` 负责。 - -## 3. 
目标架构 - -### 3.1 目录结构 - -``` -backend/ -├── packages/ -│ └── harness/ -│ ├── pyproject.toml # deerflow-harness 包定义 -│ └── deerflow/ # Python 包根(import 前缀: deerflow.*) -│ ├── __init__.py -│ ├── config/ -│ ├── reflection/ -│ ├── utils/ -│ ├── agents/ -│ │ ├── lead_agent/ -│ │ ├── middlewares/ -│ │ ├── memory/ -│ │ ├── checkpointer/ -│ │ └── thread_state.py -│ ├── subagents/ -│ ├── sandbox/ -│ ├── tools/ -│ ├── mcp/ -│ ├── skills/ -│ ├── models/ -│ ├── community/ -│ └── client.py -├── app/ # 不打包(import 前缀: app.*) -│ ├── __init__.py -│ ├── gateway/ -│ │ ├── __init__.py -│ │ ├── app.py -│ │ ├── config.py -│ │ ├── path_utils.py -│ │ └── routers/ -│ └── channels/ -│ ├── __init__.py -│ ├── base.py -│ ├── manager.py -│ ├── service.py -│ ├── store.py -│ ├── message_bus.py -│ ├── feishu.py -│ ├── slack.py -│ └── telegram.py -├── pyproject.toml # uv workspace root -├── langgraph.json -├── tests/ -├── docs/ -└── Makefile -``` - -### 3.2 Import 规则 - -两个层使用不同的 import 前缀,职责边界一目了然: - -```python -# --------------------------------------------------------------- -# Harness 内部互相引用(deerflow.* 前缀) -# --------------------------------------------------------------- -from deerflow.agents import make_lead_agent -from deerflow.models import create_chat_model -from deerflow.config import get_app_config -from deerflow.tools import get_available_tools - -# --------------------------------------------------------------- -# App 内部互相引用(app.* 前缀) -# --------------------------------------------------------------- -from app.gateway.app import app -from app.gateway.routers.uploads import upload_files -from app.channels.service import start_channel_service - -# --------------------------------------------------------------- -# App 调用 Harness(单向依赖,Harness 永远不 import app) -# --------------------------------------------------------------- -from deerflow.agents import make_lead_agent -from deerflow.models import create_chat_model -from deerflow.skills import load_skills -from deerflow.config.extensions_config import get_extensions_config -``` - -**App 调用 Harness 示例 — Gateway 中启动 agent**: - -```python -# app/gateway/routers/chat.py -from deerflow.agents.lead_agent.agent import make_lead_agent -from deerflow.models import create_chat_model -from deerflow.config import get_app_config - -async def create_chat_session(thread_id: str, model_name: str): - config = get_app_config() - model = create_chat_model(name=model_name) - agent = make_lead_agent(config=...) - # ... 使用 agent 处理用户消息 -``` - -**App 调用 Harness 示例 — Channel 中查询 skills**: - -```python -# app/channels/manager.py -from deerflow.skills import load_skills -from deerflow.agents.memory.updater import get_memory_data - -def handle_status_command(): - skills = load_skills(enabled_only=True) - memory = get_memory_data() - return f"Skills: {len(skills)}, Memory facts: {len(memory.get('facts', []))}" -``` - -**禁止方向**:Harness 代码中绝不能出现 `from app.` 或 `import app.`。 - -### 3.3 为什么 App 不打包 - -| 方面 | 打包(放 packages/ 下) | 不打包(放 backend/app/) | -|------|------------------------|--------------------------| -| 命名空间 | 需要 pkgutil `extend_path` 合并,或独立前缀 | 天然独立,`app.*` vs `deerflow.*` | -| 发布需求 | 没有——App 是项目内部代码 | 不需要 pyproject.toml | -| 复杂度 | 需要管理两个包的构建、版本、依赖声明 | 直接运行,零额外配置 | -| 运行方式 | `pip install deerflow-app` | `PYTHONPATH=. 
uvicorn app.gateway.app:app` | - -App 的唯一消费者是 DeerFlow 项目自身,没有独立发布的需求。放在 `backend/app/` 下作为普通 Python 包,通过 `PYTHONPATH` 或 editable install 让 Python 找到即可。 - -### 3.4 依赖关系 - -``` -┌─────────────────────────────────────┐ -│ app/ (不打包,直接运行) │ -│ ├── fastapi, uvicorn │ -│ ├── slack-sdk, lark-oapi, ... │ -│ └── import deerflow.* │ -└──────────────┬──────────────────────┘ - │ - ▼ -┌─────────────────────────────────────┐ -│ deerflow-harness (可发布的包) │ -│ ├── langgraph, langchain │ -│ ├── markitdown, pydantic, ... │ -│ └── 零 app 依赖 │ -└─────────────────────────────────────┘ -``` - -**依赖分类**: - -| 分类 | 依赖包 | -|------|--------| -| Harness only | agent-sandbox, langchain*, langgraph*, markdownify, markitdown, pydantic, pyyaml, readabilipy, tavily-python, firecrawl-py, tiktoken, ddgs, duckdb, httpx, kubernetes, dotenv | -| App only | fastapi, uvicorn, sse-starlette, python-multipart, lark-oapi, slack-sdk, python-telegram-bot, markdown-to-mrkdwn | -| Shared | langgraph-sdk(channels 用 HTTP client), pydantic, httpx | - -### 3.5 Workspace 配置 - -`backend/pyproject.toml`(workspace root): - -```toml -[project] -name = "deer-flow" -version = "0.1.0" -requires-python = ">=3.12" -dependencies = ["deerflow-harness"] - -[dependency-groups] -dev = ["pytest>=8.0.0", "ruff>=0.14.11"] -# App 的额外依赖(fastapi 等)也声明在 workspace root,因为 app 不打包 -app = ["fastapi", "uvicorn", "sse-starlette", "python-multipart"] -channels = ["lark-oapi", "slack-sdk", "python-telegram-bot"] - -[tool.uv.workspace] -members = ["packages/harness"] - -[tool.uv.sources] -deerflow-harness = { workspace = true } -``` - -## 4. 当前的跨层依赖问题 - -在拆分之前,需要先解决 `client.py` 中两处从 harness 到 app 的反向依赖: - -### 4.1 `_validate_skill_frontmatter` - -```python -# client.py — harness 导入了 app 层代码 -from src.gateway.routers.skills import _validate_skill_frontmatter -``` - -**解决方案**:将该函数提取到 `deerflow/skills/validation.py`。这是一个纯逻辑函数(解析 YAML frontmatter、校验字段),与 FastAPI 无关。 - -### 4.2 `CONVERTIBLE_EXTENSIONS` + `convert_file_to_markdown` - -```python -# client.py — harness 导入了 app 层代码 -from src.gateway.routers.uploads import CONVERTIBLE_EXTENSIONS, convert_file_to_markdown -``` - -**解决方案**:将它们提取到 `deerflow/utils/file_conversion.py`。仅依赖 `markitdown` + `pathlib`,是通用工具函数。 - -## 5. 基础设施变更 - -### 5.1 LangGraph Server - -LangGraph Server 只需要 harness 包。`langgraph.json` 更新: - -```json -{ - "dependencies": ["./packages/harness"], - "graphs": { - "lead_agent": "deerflow.agents:make_lead_agent" - }, - "checkpointer": { - "path": "./packages/harness/deerflow/agents/checkpointer/async_provider.py:make_checkpointer" - } -} -``` - -### 5.2 Gateway API - -```bash -# serve.sh / Makefile -# PYTHONPATH 包含 backend/ 根目录,使 app.* 和 deerflow.* 都能被找到 -PYTHONPATH=. uvicorn app.gateway.app:app --host 0.0.0.0 --port 8001 -``` - -### 5.3 Nginx - -无需变更(只做 URL 路由,不涉及 Python 模块路径)。 - -### 5.4 Docker - -Dockerfile 中的 module 引用从 `src.` 改为 `deerflow.` / `app.`,`COPY` 命令需覆盖 `packages/` 和 `app/` 目录。 - -## 6. 实施计划 - -分 3 个 PR 递进执行: - -### PR 1:提取共享工具函数(Low Risk) - -1. 创建 `src/skills/validation.py`,从 `gateway/routers/skills.py` 提取 `_validate_skill_frontmatter` -2. 创建 `src/utils/file_conversion.py`,从 `gateway/routers/uploads.py` 提取文件转换逻辑 -3. 更新 `client.py`、`gateway/routers/skills.py`、`gateway/routers/uploads.py` 的 import -4. 运行全部测试确认无回归 - -### PR 2:Rename + 物理拆分(High Risk,原子操作) - -1. 创建 `packages/harness/` 目录,创建 `pyproject.toml` -2. `git mv` 将 harness 相关模块从 `src/` 移入 `packages/harness/deerflow/` -3. `git mv` 将 app 相关模块从 `src/` 移入 `app/` -4. 
全局替换 import: - - harness 模块:`src.*` → `deerflow.*`(所有 `.py` 文件、`langgraph.json`、测试、文档) - - app 模块:`src.gateway.*` → `app.gateway.*`、`src.channels.*` → `app.channels.*` -5. 更新 workspace root `pyproject.toml` -6. 更新 `langgraph.json`、`Makefile`、`Dockerfile` -7. `uv sync` + 全部测试 + 手动验证服务启动 - -### PR 3:边界检查 + 文档(Low Risk) - -1. 添加 lint 规则:检查 harness 不 import app 模块 -2. 更新 `CLAUDE.md`、`README.md` - -## 7. 风险与缓解 - -| 风险 | 影响 | 缓解措施 | -|------|------|----------| -| 全局 rename 误伤 | 字符串中的 `src` 被错误替换 | 正则精确匹配 `\bsrc\.`,review diff | -| LangGraph Server 找不到模块 | 服务启动失败 | `langgraph.json` 的 `dependencies` 指向正确的 harness 包路径 | -| App 的 `PYTHONPATH` 缺失 | Gateway/Channel 启动 import 报错 | Makefile/Docker 统一设置 `PYTHONPATH=.` | -| `config.yaml` 中的 `use` 字段引用旧路径 | 运行时模块解析失败 | `config.yaml` 中的 `use` 字段同步更新为 `deerflow.*` | -| 测试中 `sys.path` 混乱 | 测试失败 | 用 editable install(`uv sync`)确保 deerflow 可导入,`conftest.py` 中添加 `app/` 到 `sys.path` | - -## 8. 未来演进 - -- **独立发布**:harness 可以发布到内部 PyPI,让其他项目直接 `pip install deerflow-harness` -- **插件化 App**:不同的 app(web、CLI、bot)可以各自独立,都依赖同一个 harness -- **更细粒度拆分**:如果 harness 内部模块继续增长,可以进一步拆分(如 `deerflow-sandbox`、`deerflow-mcp`) diff --git a/backend/docs/MCP_SERVER.md b/backend/docs/MCP_SERVER.md index efe2ea0c4..b7320f8cc 100644 --- a/backend/docs/MCP_SERVER.md +++ b/backend/docs/MCP_SERVER.md @@ -45,6 +45,41 @@ Example: } ``` +## Custom Tool Interceptors + +You can register custom interceptors that run before every MCP tool call. This is useful for injecting per-request headers (e.g., user auth tokens from the LangGraph execution context), logging, or metrics. + +Declare interceptors in `extensions_config.json` using the `mcpInterceptors` field: + +```json +{ + "mcpInterceptors": [ + "my_package.mcp.auth:build_auth_interceptor" + ], + "mcpServers": { ... } +} +``` + +Each entry is a Python import path in `module:variable` format (resolved via `resolve_variable`). The variable must be a **no-arg builder function** that returns an async interceptor compatible with `MultiServerMCPClient`’s `tool_interceptors` interface, or `None` to skip. + +Example interceptor that injects auth headers from LangGraph metadata: + +```python +def build_auth_interceptor(): + async def interceptor(request, handler): + from langgraph.config import get_config + metadata = get_config().get("metadata", {}) + headers = dict(request.headers or {}) + if token := metadata.get("auth_token"): + headers["X-Auth-Token"] = token + return await handler(request.override(headers=headers)) + return interceptor +``` + +- A single string value is accepted and normalized to a one-element list. +- Invalid paths or builder failures are logged as warnings without blocking other interceptors. +- The builder return value must be `callable`; non-callable values are skipped with a warning. + ## How It Works MCP servers expose tools that are automatically discovered and integrated into DeerFlow’s agent system at runtime. Once enabled, these tools become available to agents without additional code changes. 
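+
+Beyond header injection, interceptors are also a natural place for lightweight observability (the "logging, or metrics" case above). The sketch below is a hypothetical timing interceptor; the `request`/`handler` shapes follow the same `tool_interceptors` contract as the auth example, and the attribute access is illustrative rather than a documented field:
+
+```python
+import logging
+import time
+
+logger = logging.getLogger(__name__)
+
+
+def build_timing_interceptor():
+    async def interceptor(request, handler):
+        start = time.perf_counter()
+        try:
+            # Pass the request through unchanged
+            return await handler(request)
+        finally:
+            elapsed_ms = (time.perf_counter() - start) * 1000
+            tool_name = getattr(request, "name", "<unknown>")
+            logger.info("MCP tool %s finished in %.1f ms", tool_name, elapsed_ms)
+    return interceptor
+```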
diff --git a/backend/docs/TITLE_GENERATION_IMPLEMENTATION.md b/backend/docs/TITLE_GENERATION_IMPLEMENTATION.md index 07a026e79..87e8aa61a 100644 --- a/backend/docs/TITLE_GENERATION_IMPLEMENTATION.md +++ b/backend/docs/TITLE_GENERATION_IMPLEMENTATION.md @@ -124,7 +124,7 @@ title: # checkpointer.py from langgraph.checkpoint.sqlite import SqliteSaver -checkpointer = SqliteSaver.from_conn_string("checkpoints.db") +checkpointer = SqliteSaver.from_conn_string("deerflow.db") ``` ```json diff --git a/backend/docs/summarization.md b/backend/docs/summarization.md index ca1e8dda1..773d27e3d 100644 --- a/backend/docs/summarization.md +++ b/backend/docs/summarization.md @@ -41,6 +41,13 @@ summarization: # Custom summary prompt (optional) summary_prompt: null + + # Tool names treated as skill file reads for skill rescue + skill_file_read_tool_names: + - read_file + - read + - view + - cat ``` ### Configuration Options @@ -125,6 +132,26 @@ keep: - **Default**: `null` (uses LangChain's default prompt) - **Description**: Custom prompt template for generating summaries. The prompt should guide the model to extract the most important context. +#### `preserve_recent_skill_count` +- **Type**: Integer (≥ 0) +- **Default**: `5` +- **Description**: Number of most-recently-loaded skill files (tool results whose tool name is in `skill_file_read_tool_names` and whose target path is under `skills.container_path`, e.g. `/mnt/skills/...`) that are rescued from summarization. Prevents the agent from losing skill instructions after compression. Set to `0` to disable skill rescue entirely. + +#### `preserve_recent_skill_tokens` +- **Type**: Integer (≥ 0) +- **Default**: `25000` +- **Description**: Total token budget reserved for rescued skill reads. Once this budget is exhausted, older skill bundles are allowed to be summarized. + +#### `preserve_recent_skill_tokens_per_skill` +- **Type**: Integer (≥ 0) +- **Default**: `5000` +- **Description**: Per-skill token cap. Any individual skill read whose tool result exceeds this size is not rescued (it falls through to the summarizer like ordinary content). + +#### `skill_file_read_tool_names` +- **Type**: List of strings +- **Default**: `["read_file", "read", "view", "cat"]` +- **Description**: Tool names treated as skill file reads during summarization rescue. A tool call is only eligible for skill rescue when its name appears in this list and its target path is under `skills.container_path`. + **Default Prompt Behavior:** The default LangChain prompt instructs the model to: - Extract highest quality/most relevant context @@ -147,6 +174,7 @@ The default LangChain prompt instructs the model to: - A single summary message is added - Recent messages are preserved 6. **AI/Tool Pair Protection**: The system ensures AI messages and their corresponding tool messages stay together +7. **Skill Rescue**: Before the summary is generated, the most recently loaded skill files (tool results whose tool name is in `skill_file_read_tool_names` and whose target path is under `skills.container_path`) are lifted out of the summarization set and prepended to the preserved tail. Selection walks newest-first under three budgets: `preserve_recent_skill_count`, `preserve_recent_skill_tokens`, and `preserve_recent_skill_tokens_per_skill`. The triggering AIMessage and all of its paired ToolMessages move together so tool_call ↔ tool_result pairing stays intact. 
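+
+As a rough sketch, the selection walk behind Skill Rescue can be pictured like this (hypothetical helper names; the actual middleware differs in structure):
+
+```python
+# Hypothetical sketch of the newest-first skill-rescue walk (not the real implementation)
+def select_rescued_bundles(bundles, max_count, total_budget, per_skill_cap, count_tokens):
+    """bundles: skill-read groups (AIMessage + paired ToolMessages), ordered oldest -> newest."""
+    rescued, spent = [], 0
+    for bundle in reversed(bundles):      # walk newest-first
+        if len(rescued) >= max_count:     # preserve_recent_skill_count exhausted
+            break
+        cost = sum(count_tokens(m) for m in bundle)
+        if cost > per_skill_cap:          # preserve_recent_skill_tokens_per_skill
+            continue                      # oversized read falls through to the summarizer
+        if spent + cost > total_budget:   # preserve_recent_skill_tokens
+            break
+        rescued.append(bundle)            # AI message and its tool results move together
+        spent += cost
+    return rescued
+```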
### Token Counting diff --git a/backend/langgraph.json b/backend/langgraph.json index 74f5c691d..8ecba0381 100644 --- a/backend/langgraph.json +++ b/backend/langgraph.json @@ -8,7 +8,10 @@ "graphs": { "lead_agent": "deerflow.agents:make_lead_agent" }, + "auth": { + "path": "./app/gateway/langgraph_auth.py:auth" + }, "checkpointer": { - "path": "./packages/harness/deerflow/agents/checkpointer/async_provider.py:make_checkpointer" + "path": "./packages/harness/deerflow/runtime/checkpointer/async_provider.py:make_checkpointer" } } diff --git a/backend/packages/harness/deerflow/agents/__init__.py b/backend/packages/harness/deerflow/agents/__init__.py index 2c31a514a..397f67f8e 100644 --- a/backend/packages/harness/deerflow/agents/__init__.py +++ b/backend/packages/harness/deerflow/agents/__init__.py @@ -1,4 +1,3 @@ -from .checkpointer import get_checkpointer, make_checkpointer, reset_checkpointer from .factory import create_deerflow_agent from .features import Next, Prev, RuntimeFeatures from .lead_agent import make_lead_agent @@ -18,7 +17,4 @@ __all__ = [ "make_lead_agent", "SandboxState", "ThreadState", - "get_checkpointer", - "reset_checkpointer", - "make_checkpointer", ] diff --git a/backend/packages/harness/deerflow/agents/factory.py b/backend/packages/harness/deerflow/agents/factory.py index 57361edba..bd57d733d 100644 --- a/backend/packages/harness/deerflow/agents/factory.py +++ b/backend/packages/harness/deerflow/agents/factory.py @@ -254,9 +254,11 @@ def _assemble_from_features( from deerflow.agents.middlewares.view_image_middleware import ViewImageMiddleware chain.append(ViewImageMiddleware()) - from deerflow.tools.builtins import view_image_tool - extra_tools.append(view_image_tool) + if feat.sandbox is not False: + from deerflow.tools.builtins import view_image_tool + + extra_tools.append(view_image_tool) # --- [11] Subagent --- if feat.subagent is not False: diff --git a/backend/packages/harness/deerflow/agents/lead_agent/agent.py b/backend/packages/harness/deerflow/agents/lead_agent/agent.py index de3ff6766..12fedd5b2 100644 --- a/backend/packages/harness/deerflow/agents/lead_agent/agent.py +++ b/backend/packages/harness/deerflow/agents/lead_agent/agent.py @@ -18,7 +18,7 @@ from deerflow.agents.middlewares.tool_error_handling_middleware import build_lea from deerflow.agents.middlewares.view_image_middleware import ViewImageMiddleware from deerflow.agents.thread_state import ThreadState from deerflow.config.agents_config import load_agent_config, validate_agent_name -from deerflow.config.app_config import get_app_config +from deerflow.config.app_config import AppConfig, get_app_config from deerflow.config.memory_config import get_memory_config from deerflow.config.summarization_config import get_summarization_config from deerflow.models import create_chat_model @@ -26,9 +26,18 @@ from deerflow.models import create_chat_model logger = logging.getLogger(__name__) -def _resolve_model_name(requested_model_name: str | None = None) -> str: +def _get_runtime_config(config: RunnableConfig) -> dict: + """Merge legacy configurable options with LangGraph runtime context.""" + cfg = dict(config.get("configurable", {}) or {}) + context = config.get("context", {}) or {} + if isinstance(context, dict): + cfg.update(context) + return cfg + + +def _resolve_model_name(requested_model_name: str | None = None, *, app_config: AppConfig | None = None) -> str: """Resolve a runtime model name safely, falling back to default if invalid. 
Returns None if no models are configured.""" - app_config = get_app_config() + app_config = app_config or get_app_config() default_model_name = app_config.models[0].name if app_config.models else None if default_model_name is None: raise ValueError("No chat models are configured. Please configure at least one model in config.yaml.") @@ -41,7 +50,7 @@ def _resolve_model_name(requested_model_name: str | None = None) -> str: return default_model_name -def _create_summarization_middleware() -> DeerFlowSummarizationMiddleware | None: +def _create_summarization_middleware(*, app_config: AppConfig | None = None) -> DeerFlowSummarizationMiddleware | None: """Create and configure the summarization middleware from config.""" config = get_summarization_config() @@ -59,13 +68,15 @@ def _create_summarization_middleware() -> DeerFlowSummarizationMiddleware | None # Prepare keep parameter keep = config.keep.to_tuple() - # Prepare model parameter + # Prepare model parameter. + # Bind "middleware:summarize" tag so RunJournal identifies these LLM calls + # as middleware rather than lead_agent (SummarizationMiddleware is a + # LangChain built-in, so we tag the model at creation time). if config.model_name: - model = create_chat_model(name=config.model_name, thinking_enabled=False) + model = create_chat_model(name=config.model_name, thinking_enabled=False, app_config=app_config) else: - # Use a lightweight model for summarization to save costs - # Falls back to default model if not explicitly specified - model = create_chat_model(thinking_enabled=False) + model = create_chat_model(thinking_enabled=False, app_config=app_config) + model = model.with_config(tags=["middleware:summarize"]) # Prepare kwargs kwargs = { @@ -84,7 +95,25 @@ def _create_summarization_middleware() -> DeerFlowSummarizationMiddleware | None if get_memory_config().enabled: hooks.append(memory_flush_hook) - return DeerFlowSummarizationMiddleware(**kwargs, before_summarization=hooks) + # The logic below relies on two assumptions holding true: this factory is + # the sole entry point for DeerFlowSummarizationMiddleware, and the runtime + # config is not expected to change after startup. 
+ try: + resolved_app_config = app_config or get_app_config() + skills_container_path = resolved_app_config.skills.container_path or "/mnt/skills" + except Exception: + logger.exception("Failed to resolve skills container path; falling back to default") + skills_container_path = "/mnt/skills" + + return DeerFlowSummarizationMiddleware( + **kwargs, + skills_container_path=skills_container_path, + skill_file_read_tool_names=config.skill_file_read_tool_names, + before_summarization=hooks, + preserve_recent_skill_count=config.preserve_recent_skill_count, + preserve_recent_skill_tokens=config.preserve_recent_skill_tokens, + preserve_recent_skill_tokens_per_skill=config.preserve_recent_skill_tokens_per_skill, + ) def _create_todo_list_middleware(is_plan_mode: bool) -> TodoMiddleware | None: @@ -212,7 +241,14 @@ Being proactive with task management demonstrates thoroughness and ensures all r # ViewImageMiddleware should be before ClarificationMiddleware to inject image details before LLM # ToolErrorHandlingMiddleware should be before ClarificationMiddleware to convert tool exceptions to ToolMessages # ClarificationMiddleware should be last to intercept clarification requests after model calls -def _build_middlewares(config: RunnableConfig, model_name: str | None, agent_name: str | None = None, custom_middlewares: list[AgentMiddleware] | None = None): +def _build_middlewares( + config: RunnableConfig, + model_name: str | None, + agent_name: str | None = None, + custom_middlewares: list[AgentMiddleware] | None = None, + *, + app_config: AppConfig | None = None, +): """Build middleware chain based on runtime configuration. Args: @@ -223,21 +259,23 @@ def _build_middlewares(config: RunnableConfig, model_name: str | None, agent_nam Returns: List of middleware instances. """ - middlewares = build_lead_runtime_middlewares(lazy_init=True) + resolved_app_config = app_config or get_app_config() + middlewares = build_lead_runtime_middlewares(app_config=resolved_app_config, lazy_init=True) # Add summarization middleware if enabled - summarization_middleware = _create_summarization_middleware() + summarization_middleware = _create_summarization_middleware(app_config=resolved_app_config) if summarization_middleware is not None: middlewares.append(summarization_middleware) # Add TodoList middleware if plan mode is enabled - is_plan_mode = config.get("configurable", {}).get("is_plan_mode", False) + cfg = _get_runtime_config(config) + is_plan_mode = cfg.get("is_plan_mode", False) todo_list_middleware = _create_todo_list_middleware(is_plan_mode) if todo_list_middleware is not None: middlewares.append(todo_list_middleware) # Add TokenUsageMiddleware when token_usage tracking is enabled - if get_app_config().token_usage.enabled: + if resolved_app_config.token_usage.enabled: middlewares.append(TokenUsageMiddleware()) # Add TitleMiddleware @@ -248,21 +286,20 @@ def _build_middlewares(config: RunnableConfig, model_name: str | None, agent_nam # Add ViewImageMiddleware only if the current model supports vision. # Use the resolved runtime model_name from make_lead_agent to avoid stale config values. 
- app_config = get_app_config() - model_config = app_config.get_model_config(model_name) if model_name else None + model_config = resolved_app_config.get_model_config(model_name) if model_name else None if model_config is not None and model_config.supports_vision: middlewares.append(ViewImageMiddleware()) # Add DeferredToolFilterMiddleware to hide deferred tool schemas from model binding - if app_config.tool_search.enabled: + if resolved_app_config.tool_search.enabled: from deerflow.agents.middlewares.deferred_tool_filter_middleware import DeferredToolFilterMiddleware middlewares.append(DeferredToolFilterMiddleware()) # Add SubagentLimitMiddleware to truncate excess parallel task calls - subagent_enabled = config.get("configurable", {}).get("subagent_enabled", False) + subagent_enabled = cfg.get("subagent_enabled", False) if subagent_enabled: - max_concurrent_subagents = config.get("configurable", {}).get("max_concurrent_subagents", 3) + max_concurrent_subagents = cfg.get("max_concurrent_subagents", 3) middlewares.append(SubagentLimitMiddleware(max_concurrent=max_concurrent_subagents)) # LoopDetectionMiddleware — detect and break repetitive tool call loops @@ -278,11 +315,17 @@ def _build_middlewares(config: RunnableConfig, model_name: str | None, agent_nam def make_lead_agent(config: RunnableConfig): + """LangGraph graph factory; keep the signature compatible with LangGraph Server.""" + return _make_lead_agent(config, app_config=get_app_config()) + + +def _make_lead_agent(config: RunnableConfig, *, app_config: AppConfig): # Lazy import to avoid circular dependency from deerflow.tools import get_available_tools from deerflow.tools.builtins import setup_agent - cfg = config.get("configurable", {}) + cfg = _get_runtime_config(config) + resolved_app_config = app_config thinking_enabled = cfg.get("thinking_enabled", True) reasoning_effort = cfg.get("reasoning_effort", None) @@ -298,10 +341,9 @@ def make_lead_agent(config: RunnableConfig): agent_model_name = agent_config.model if agent_config and agent_config.model else None # Final model name resolution: request → agent config → global default, with fallback for unknown names - model_name = _resolve_model_name(requested_model_name or agent_model_name) + model_name = _resolve_model_name(requested_model_name or agent_model_name, app_config=resolved_app_config) - app_config = get_app_config() - model_config = app_config.get_model_config(model_name) + model_config = resolved_app_config.get_model_config(model_name) if model_config is None: raise ValueError("No chat model could be resolved. 
Please configure at least one model in config.yaml or provide a valid 'model_name'/'model' in the request.") @@ -333,26 +375,41 @@ def make_lead_agent(config: RunnableConfig): "is_plan_mode": is_plan_mode, "subagent_enabled": subagent_enabled, "tool_groups": agent_config.tool_groups if agent_config else None, + "available_skills": ["bootstrap"] if is_bootstrap else (agent_config.skills if agent_config and agent_config.skills is not None else None), } ) if is_bootstrap: # Special bootstrap agent with minimal prompt for initial custom agent creation flow return create_agent( - model=create_chat_model(name=model_name, thinking_enabled=thinking_enabled), - tools=get_available_tools(model_name=model_name, subagent_enabled=subagent_enabled) + [setup_agent], - middleware=_build_middlewares(config, model_name=model_name), - system_prompt=apply_prompt_template(subagent_enabled=subagent_enabled, max_concurrent_subagents=max_concurrent_subagents, available_skills=set(["bootstrap"])), + model=create_chat_model(name=model_name, thinking_enabled=thinking_enabled, app_config=resolved_app_config), + tools=get_available_tools(model_name=model_name, subagent_enabled=subagent_enabled, app_config=resolved_app_config) + [setup_agent], + middleware=_build_middlewares(config, model_name=model_name, app_config=resolved_app_config), + system_prompt=apply_prompt_template( + subagent_enabled=subagent_enabled, + max_concurrent_subagents=max_concurrent_subagents, + available_skills=set(["bootstrap"]), + app_config=resolved_app_config, + ), state_schema=ThreadState, ) # Default lead agent (unchanged behavior) return create_agent( - model=create_chat_model(name=model_name, thinking_enabled=thinking_enabled, reasoning_effort=reasoning_effort), - tools=get_available_tools(model_name=model_name, groups=agent_config.tool_groups if agent_config else None, subagent_enabled=subagent_enabled), - middleware=_build_middlewares(config, model_name=model_name, agent_name=agent_name), + model=create_chat_model(name=model_name, thinking_enabled=thinking_enabled, reasoning_effort=reasoning_effort, app_config=resolved_app_config), + tools=get_available_tools( + model_name=model_name, + groups=agent_config.tool_groups if agent_config else None, + subagent_enabled=subagent_enabled, + app_config=resolved_app_config, + ), + middleware=_build_middlewares(config, model_name=model_name, agent_name=agent_name, app_config=resolved_app_config), system_prompt=apply_prompt_template( - subagent_enabled=subagent_enabled, max_concurrent_subagents=max_concurrent_subagents, agent_name=agent_name, available_skills=set(agent_config.skills) if agent_config and agent_config.skills is not None else None + subagent_enabled=subagent_enabled, + max_concurrent_subagents=max_concurrent_subagents, + agent_name=agent_name, + available_skills=set(agent_config.skills) if agent_config and agent_config.skills is not None else None, + app_config=resolved_app_config, ), state_schema=ThreadState, ) diff --git a/backend/packages/harness/deerflow/agents/lead_agent/prompt.py b/backend/packages/harness/deerflow/agents/lead_agent/prompt.py index dda49a1de..9b6fd9cd4 100644 --- a/backend/packages/harness/deerflow/agents/lead_agent/prompt.py +++ b/backend/packages/harness/deerflow/agents/lead_agent/prompt.py @@ -1,14 +1,20 @@ +from __future__ import annotations + import asyncio import logging import threading from datetime import datetime from functools import lru_cache +from typing import TYPE_CHECKING from deerflow.config.agents_config import load_agent_soul -from 
deerflow.skills import load_skills -from deerflow.skills.types import Skill +from deerflow.skills.storage import get_or_new_skill_storage +from deerflow.skills.types import Skill, SkillCategory from deerflow.subagents import get_available_subagent_names +if TYPE_CHECKING: + from deerflow.config.app_config import AppConfig + logger = logging.getLogger(__name__) _ENABLED_SKILLS_REFRESH_WAIT_TIMEOUT_SECONDS = 5.0 @@ -20,7 +26,7 @@ _enabled_skills_refresh_event = threading.Event() def _load_enabled_skills_sync() -> list[Skill]: - return list(load_skills(enabled_only=True)) + return list(get_or_new_skill_storage().load_skills(enabled_only=True)) def _start_enabled_skills_refresh_thread() -> None: @@ -111,8 +117,21 @@ def _get_enabled_skills(): return [] -def _skill_mutability_label(category: str) -> str: - return "[custom, editable]" if category == "custom" else "[built-in]" +def _get_enabled_skills_for_config(app_config: AppConfig | None = None) -> list[Skill]: + """Return enabled skills using the caller's config source. + + When a concrete ``app_config`` is supplied, bypass the global enabled-skills + cache so the skill list and skill paths are resolved from the same config + object. This keeps request-scoped config injection consistent even while the + release branch still supports global fallback paths. + """ + if app_config is None: + return _get_enabled_skills() + return list(get_or_new_skill_storage(app_config=app_config).load_skills(enabled_only=True)) + + +def _skill_mutability_label(category: SkillCategory | str) -> str: + return "[custom, editable]" if category == SkillCategory.CUSTOM else "[built-in]" def clear_skills_system_prompt_cache() -> None: @@ -123,31 +142,6 @@ async def refresh_skills_system_prompt_cache_async() -> None: await asyncio.to_thread(_invalidate_enabled_skills_cache().wait) -def _reset_skills_system_prompt_cache_state() -> None: - global _enabled_skills_cache, _enabled_skills_refresh_active, _enabled_skills_refresh_version - - _get_cached_skills_prompt_section.cache_clear() - with _enabled_skills_lock: - _enabled_skills_cache = None - _enabled_skills_refresh_active = False - _enabled_skills_refresh_version = 0 - _enabled_skills_refresh_event.clear() - - -def _refresh_enabled_skills_cache() -> None: - """Backward-compatible test helper for direct synchronous reload.""" - try: - skills = _load_enabled_skills_sync() - except Exception: - logger.exception("Failed to load enabled skills for prompt injection") - skills = [] - - with _enabled_skills_lock: - _enabled_skills_cache = skills - _enabled_skills_refresh_active = False - _enabled_skills_refresh_event.set() - - def _build_skill_evolution_section(skill_evolution_enabled: bool) -> str: if not skill_evolution_enabled: return "" @@ -164,6 +158,36 @@ Skip simple one-off tasks. """ +def _build_available_subagents_description(available_names: list[str], bash_available: bool) -> str: + """Dynamically build subagent type descriptions from registry. + + Mirrors Codex's pattern where agent_type_description is dynamically generated + from all registered roles, so the LLM knows about every available type. + """ + # Built-in descriptions (kept for backward compatibility with existing prompt quality) + builtin_descriptions = { + "general-purpose": "For ANY non-trivial task - web research, code exploration, file operations, analysis, etc.", + "bash": ( + "For command execution (git, build, test, deploy operations)" if bash_available else "Not available in the current sandbox configuration. 
Use direct file/web tools or switch to AioSandboxProvider for isolated shell access." + ), + } + + # Lazy import moved outside loop to avoid repeated import overhead + from deerflow.subagents.registry import get_subagent_config + + lines = [] + for name in available_names: + if name in builtin_descriptions: + lines.append(f"- **{name}**: {builtin_descriptions[name]}") + else: + config = get_subagent_config(name) + if config is not None: + desc = config.description.split("\n")[0].strip() # First line only for brevity + lines.append(f"- **{name}**: {desc}") + + return "\n".join(lines) + + def _build_subagent_section(max_concurrent: int) -> str: """Build the subagent system prompt section with dynamic concurrency limit. @@ -174,13 +198,12 @@ def _build_subagent_section(max_concurrent: int) -> str: Formatted subagent section string. """ n = max_concurrent - bash_available = "bash" in get_available_subagent_names() - available_subagents = ( - "- **general-purpose**: For ANY non-trivial task - web research, code exploration, file operations, analysis, etc.\n- **bash**: For command execution (git, build, test, deploy operations)" - if bash_available - else "- **general-purpose**: For ANY non-trivial task - web research, code exploration, file operations, analysis, etc.\n" - "- **bash**: Not available in the current sandbox configuration. Use direct file/web tools or switch to AioSandboxProvider for isolated shell access." - ) + available_names = get_available_subagent_names() + bash_available = "bash" in available_names + + # Dynamically build subagent type descriptions from registry (aligned with Codex's + # agent_type_description pattern where all registered roles are listed in the tool spec). + available_subagents = _build_available_subagents_description(available_names, bash_available) direct_tool_examples = "bash, ls, read_file, web_search, etc." if bash_available else "ls, read_file, web_search, etc." direct_execution_example = ( '# User asks: "Run the tests"\n# Thinking: Cannot decompose into parallel sub-tasks\n# → Execute directly\n\nbash("npm test") # Direct execution, not task()' @@ -519,12 +542,13 @@ def _get_memory_context(agent_name: str | None = None) -> str: try: from deerflow.agents.memory import format_memory_for_injection, get_memory_data from deerflow.config.memory_config import get_memory_config + from deerflow.runtime.user_context import get_effective_user_id config = get_memory_config() if not config.enabled or not config.injection_enabled: return "" - memory_data = get_memory_data(agent_name) + memory_data = get_memory_data(agent_name, user_id=get_effective_user_id()) memory_content = format_memory_for_injection(memory_data, max_tokens=config.max_injection_tokens) if not memory_content.strip(): @@ -571,14 +595,14 @@ You have access to skills that provide optimized workflows for specific tasks. 
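The registry-driven description builder earlier in this hunk is easiest to see in isolation. Below is a minimal, self-contained sketch of the same pattern; `SubagentConfig`, `REGISTRY`, and the `translator` role are illustrative stand-ins, not the real `deerflow.subagents.registry` types:

```python
# Sketch: assemble "- **name**: description" lines from a registry,
# keeping built-in entries verbatim and taking only the first line of a
# registered role's description (mirroring `.split("\n")[0].strip()`).
from dataclasses import dataclass


@dataclass
class SubagentConfig:  # hypothetical stand-in for the real registry entry type
    description: str


REGISTRY: dict[str, SubagentConfig] = {
    "translator": SubagentConfig("Translate documents between languages.\nSupports glossaries."),
}

BUILTIN_DESCRIPTIONS = {
    "general-purpose": "For ANY non-trivial task - web research, code exploration, file operations, analysis, etc.",
}


def build_description_lines(available: list[str]) -> str:
    lines = []
    for name in available:
        if name in BUILTIN_DESCRIPTIONS:
            lines.append(f"- **{name}**: {BUILTIN_DESCRIPTIONS[name]}")
            continue
        config = REGISTRY.get(name)
        if config is not None:
            first_line = config.description.splitlines()[0].strip()
            lines.append(f"- **{name}**: {first_line}")
    return "\n".join(lines)


print(build_description_lines(["general-purpose", "translator"]))
```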
E """ -def get_skills_prompt_section(available_skills: set[str] | None = None) -> str: +def get_skills_prompt_section(available_skills: set[str] | None = None, *, app_config: AppConfig | None = None) -> str: """Generate the skills prompt section with available skills list.""" - skills = _get_enabled_skills() + skills = _get_enabled_skills_for_config(app_config) try: from deerflow.config import get_app_config - config = get_app_config() + config = app_config or get_app_config() container_base_path = config.skills.container_path skill_evolution_enabled = config.skill_evolution.enabled except Exception: @@ -607,7 +631,7 @@ def get_agent_soul(agent_name: str | None) -> str: return "" -def get_deferred_tools_prompt_section() -> str: +def get_deferred_tools_prompt_section(*, app_config: AppConfig | None = None) -> str: """Generate block for the system prompt. Lists only deferred tool names so the agent knows what exists @@ -619,7 +643,8 @@ def get_deferred_tools_prompt_section() -> str: try: from deerflow.config import get_app_config - if not get_app_config().tool_search.enabled: + config = app_config or get_app_config() + if not config.tool_search.enabled: return "" except Exception: return "" @@ -652,12 +677,13 @@ def _build_acp_section() -> str: ) -def _build_custom_mounts_section() -> str: +def _build_custom_mounts_section(*, app_config: AppConfig | None = None) -> str: """Build a prompt section for explicitly configured sandbox mounts.""" try: from deerflow.config import get_app_config - mounts = get_app_config().sandbox.mounts or [] + config = app_config or get_app_config() + mounts = config.sandbox.mounts or [] except Exception: logger.exception("Failed to load configured sandbox mounts for the lead-agent prompt") return "" @@ -674,7 +700,14 @@ def _build_custom_mounts_section() -> str: return f"\n**Custom Mounted Directories:**\n{mounts_list}\n- If the user needs files outside `/mnt/user-data`, use these absolute container paths directly when they match the requested directory" -def apply_prompt_template(subagent_enabled: bool = False, max_concurrent_subagents: int = 3, *, agent_name: str | None = None, available_skills: set[str] | None = None) -> str: +def apply_prompt_template( + subagent_enabled: bool = False, + max_concurrent_subagents: int = 3, + *, + agent_name: str | None = None, + available_skills: set[str] | None = None, + app_config: AppConfig | None = None, +) -> str: # Get memory context memory_context = _get_memory_context(agent_name) @@ -701,14 +734,14 @@ def apply_prompt_template(subagent_enabled: bool = False, max_concurrent_subagen ) # Get skills section - skills_section = get_skills_prompt_section(available_skills) + skills_section = get_skills_prompt_section(available_skills, app_config=app_config) # Get deferred tools section (tool_search) - deferred_tools_section = get_deferred_tools_prompt_section() + deferred_tools_section = get_deferred_tools_prompt_section(app_config=app_config) # Build ACP agent section only if ACP agents are configured acp_section = _build_acp_section() - custom_mounts_section = _build_custom_mounts_section() + custom_mounts_section = _build_custom_mounts_section(app_config=app_config) acp_and_mounts_section = "\n".join(section for section in (acp_section, custom_mounts_section) if section) # Format the prompt with dynamic skills and memory diff --git a/backend/packages/harness/deerflow/agents/memory/queue.py b/backend/packages/harness/deerflow/agents/memory/queue.py index 5a7686996..b2a147bce 100644 --- 
a/backend/packages/harness/deerflow/agents/memory/queue.py +++ b/backend/packages/harness/deerflow/agents/memory/queue.py @@ -20,6 +20,7 @@ class ConversationContext: messages: list[Any] timestamp: datetime = field(default_factory=lambda: datetime.now(UTC)) agent_name: str | None = None + user_id: str | None = None correction_detected: bool = False reinforcement_detected: bool = False @@ -44,6 +45,7 @@ class MemoryUpdateQueue: thread_id: str, messages: list[Any], agent_name: str | None = None, + user_id: str | None = None, correction_detected: bool = False, reinforcement_detected: bool = False, ) -> None: @@ -53,6 +55,9 @@ class MemoryUpdateQueue: thread_id: The thread ID. messages: The conversation messages. agent_name: If provided, memory is stored per-agent. If None, uses global memory. + user_id: The user ID captured at enqueue time. Stored in ConversationContext so it + survives the threading.Timer boundary (ContextVar does not propagate across + raw threads). correction_detected: Whether recent turns include an explicit correction signal. reinforcement_detected: Whether recent turns include a positive reinforcement signal. """ @@ -65,6 +70,7 @@ class MemoryUpdateQueue: thread_id=thread_id, messages=messages, agent_name=agent_name, + user_id=user_id, correction_detected=correction_detected, reinforcement_detected=reinforcement_detected, ) @@ -77,6 +83,7 @@ class MemoryUpdateQueue: thread_id: str, messages: list[Any], agent_name: str | None = None, + user_id: str | None = None, correction_detected: bool = False, reinforcement_detected: bool = False, ) -> None: @@ -90,6 +97,7 @@ class MemoryUpdateQueue: thread_id=thread_id, messages=messages, agent_name=agent_name, + user_id=user_id, correction_detected=correction_detected, reinforcement_detected=reinforcement_detected, ) @@ -103,6 +111,7 @@ class MemoryUpdateQueue: thread_id: str, messages: list[Any], agent_name: str | None, + user_id: str | None, correction_detected: bool, reinforcement_detected: bool, ) -> None: @@ -116,6 +125,7 @@ class MemoryUpdateQueue: thread_id=thread_id, messages=messages, agent_name=agent_name, + user_id=user_id, correction_detected=merged_correction_detected, reinforcement_detected=merged_reinforcement_detected, ) @@ -176,6 +186,7 @@ class MemoryUpdateQueue: agent_name=context.agent_name, correction_detected=context.correction_detected, reinforcement_detected=context.reinforcement_detected, + user_id=context.user_id, ) if success: logger.info("Memory updated successfully for thread %s", context.thread_id) diff --git a/backend/packages/harness/deerflow/agents/memory/storage.py b/backend/packages/harness/deerflow/agents/memory/storage.py index 8fae907d9..3d0a0e9af 100644 --- a/backend/packages/harness/deerflow/agents/memory/storage.py +++ b/backend/packages/harness/deerflow/agents/memory/storage.py @@ -44,17 +44,17 @@ class MemoryStorage(abc.ABC): """Abstract base class for memory storage providers.""" @abc.abstractmethod - def load(self, agent_name: str | None = None) -> dict[str, Any]: + def load(self, agent_name: str | None = None, *, user_id: str | None = None) -> dict[str, Any]: """Load memory data for the given agent.""" pass @abc.abstractmethod - def reload(self, agent_name: str | None = None) -> dict[str, Any]: + def reload(self, agent_name: str | None = None, *, user_id: str | None = None) -> dict[str, Any]: """Force reload memory data for the given agent.""" pass @abc.abstractmethod - def save(self, memory_data: dict[str, Any], agent_name: str | None = None) -> bool: + def save(self, memory_data: 
dict[str, Any], agent_name: str | None = None, *, user_id: str | None = None) -> bool: """Save memory data for the given agent.""" pass @@ -64,9 +64,9 @@ class FileMemoryStorage(MemoryStorage): def __init__(self): """Initialize the file memory storage.""" - # Per-agent memory cache: keyed by agent_name (None = global) + # Per-user/agent memory cache: keyed by (user_id, agent_name) tuple (None = global) # Value: (memory_data, file_mtime) - self._memory_cache: dict[str | None, tuple[dict[str, Any], float | None]] = {} + self._memory_cache: dict[tuple[str | None, str | None], tuple[dict[str, Any], float | None]] = {} # Guards all reads and writes to _memory_cache across concurrent callers. self._cache_lock = threading.Lock() @@ -81,21 +81,29 @@ class FileMemoryStorage(MemoryStorage): if not AGENT_NAME_PATTERN.match(agent_name): raise ValueError(f"Invalid agent name {agent_name!r}: names must match {AGENT_NAME_PATTERN.pattern}") - def _get_memory_file_path(self, agent_name: str | None = None) -> Path: + def _get_memory_file_path(self, agent_name: str | None = None, *, user_id: str | None = None) -> Path: """Get the path to the memory file.""" + if user_id is not None: + if agent_name is not None: + self._validate_agent_name(agent_name) + return get_paths().user_agent_memory_file(user_id, agent_name) + config = get_memory_config() + if config.storage_path and Path(config.storage_path).is_absolute(): + return Path(config.storage_path) + return get_paths().user_memory_file(user_id) + # Legacy: no user_id if agent_name is not None: self._validate_agent_name(agent_name) return get_paths().agent_memory_file(agent_name) - config = get_memory_config() if config.storage_path: p = Path(config.storage_path) return p if p.is_absolute() else get_paths().base_dir / p return get_paths().memory_file - def _load_memory_from_file(self, agent_name: str | None = None) -> dict[str, Any]: + def _load_memory_from_file(self, agent_name: str | None = None, *, user_id: str | None = None) -> dict[str, Any]: """Load memory data from file.""" - file_path = self._get_memory_file_path(agent_name) + file_path = self._get_memory_file_path(agent_name, user_id=user_id) if not file_path.exists(): return create_empty_memory() @@ -108,9 +116,14 @@ class FileMemoryStorage(MemoryStorage): logger.warning("Failed to load memory file: %s", e) return create_empty_memory() - def load(self, agent_name: str | None = None) -> dict[str, Any]: + @staticmethod + def _cache_key(agent_name: str | None = None, *, user_id: str | None = None) -> tuple[str | None, str | None]: + return (user_id, agent_name) + + def load(self, agent_name: str | None = None, *, user_id: str | None = None) -> dict[str, Any]: """Load memory data (cached with file modification time check).""" - file_path = self._get_memory_file_path(agent_name) + file_path = self._get_memory_file_path(agent_name, user_id=user_id) + cache_key = self._cache_key(agent_name, user_id=user_id) try: current_mtime = file_path.stat().st_mtime if file_path.exists() else None @@ -118,21 +131,22 @@ class FileMemoryStorage(MemoryStorage): current_mtime = None with self._cache_lock: - cached = self._memory_cache.get(agent_name) + cached = self._memory_cache.get(cache_key) if cached is not None and cached[1] == current_mtime: return cached[0] - memory_data = self._load_memory_from_file(agent_name) + memory_data = self._load_memory_from_file(agent_name, user_id=user_id) with self._cache_lock: - self._memory_cache[agent_name] = (memory_data, current_mtime) + self._memory_cache[cache_key] = (memory_data, 
current_mtime) return memory_data - def reload(self, agent_name: str | None = None) -> dict[str, Any]: + def reload(self, agent_name: str | None = None, *, user_id: str | None = None) -> dict[str, Any]: """Reload memory data from file, forcing cache invalidation.""" - file_path = self._get_memory_file_path(agent_name) - memory_data = self._load_memory_from_file(agent_name) + file_path = self._get_memory_file_path(agent_name, user_id=user_id) + memory_data = self._load_memory_from_file(agent_name, user_id=user_id) + cache_key = self._cache_key(agent_name, user_id=user_id) try: mtime = file_path.stat().st_mtime if file_path.exists() else None @@ -140,12 +154,13 @@ class FileMemoryStorage(MemoryStorage): mtime = None with self._cache_lock: - self._memory_cache[agent_name] = (memory_data, mtime) + self._memory_cache[cache_key] = (memory_data, mtime) return memory_data - def save(self, memory_data: dict[str, Any], agent_name: str | None = None) -> bool: + def save(self, memory_data: dict[str, Any], agent_name: str | None = None, *, user_id: str | None = None) -> bool: """Save memory data to file and update cache.""" - file_path = self._get_memory_file_path(agent_name) + file_path = self._get_memory_file_path(agent_name, user_id=user_id) + cache_key = self._cache_key(agent_name, user_id=user_id) try: file_path.parent.mkdir(parents=True, exist_ok=True) @@ -166,7 +181,7 @@ class FileMemoryStorage(MemoryStorage): mtime = None with self._cache_lock: - self._memory_cache[agent_name] = (memory_data, mtime) + self._memory_cache[cache_key] = (memory_data, mtime) logger.info("Memory saved to %s", file_path) return True except OSError as e: diff --git a/backend/packages/harness/deerflow/agents/memory/updater.py b/backend/packages/harness/deerflow/agents/memory/updater.py index 0966b8c48..6e55330a1 100644 --- a/backend/packages/harness/deerflow/agents/memory/updater.py +++ b/backend/packages/harness/deerflow/agents/memory/updater.py @@ -9,7 +9,6 @@ import logging import math import re import uuid -from collections.abc import Awaitable from typing import Any from deerflow.agents.memory.prompt import ( @@ -26,6 +25,12 @@ from deerflow.models import create_chat_model logger = logging.getLogger(__name__) + +# Thread pool for offloading sync memory updates when called from an async +# context. Unlike the previous asyncio.run() approach, this runs *sync* +# model.invoke() calls — no event loop is created, so the langchain async +# httpx client pool (globally cached via @lru_cache) is never touched and +# cross-loop connection reuse is impossible. 
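The comment above is the crux of the fix, and the idiom is worth a compact sketch. Everything here is illustrative: `update_fn` stands in for the blocking `model.invoke()` update path.

```python
# Sketch of the sync-offload idiom: when already inside a running event
# loop, hand the blocking sync update to a dedicated thread pool instead
# of wrapping it in asyncio.run(), so no second loop (and no shared async
# HTTP client) ever comes into play.
import asyncio
import concurrent.futures

_EXECUTOR = concurrent.futures.ThreadPoolExecutor(
    max_workers=4, thread_name_prefix="memory-updater-sync"
)


def update_fn() -> bool:
    return True  # stand-in for the blocking model.invoke() update path


def update_from_sync_context() -> bool:
    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:
        loop = None
    if loop is not None and loop.is_running():
        # Mirrors the diff: offload and wait, rather than asyncio.run(coro).
        return _EXECUTOR.submit(update_fn).result()
    return update_fn()


print(update_from_sync_context())  # True
```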
_SYNC_MEMORY_UPDATER_EXECUTOR = concurrent.futures.ThreadPoolExecutor( max_workers=4, thread_name_prefix="memory-updater-sync", @@ -38,27 +43,28 @@ def _create_empty_memory() -> dict[str, Any]: return create_empty_memory() -def _save_memory_to_file(memory_data: dict[str, Any], agent_name: str | None = None) -> bool: +def _save_memory_to_file(memory_data: dict[str, Any], agent_name: str | None = None, *, user_id: str | None = None) -> bool: """Backward-compatible wrapper around the configured memory storage save path.""" - return get_memory_storage().save(memory_data, agent_name) + return get_memory_storage().save(memory_data, agent_name, user_id=user_id) -def get_memory_data(agent_name: str | None = None) -> dict[str, Any]: +def get_memory_data(agent_name: str | None = None, *, user_id: str | None = None) -> dict[str, Any]: """Get the current memory data via storage provider.""" - return get_memory_storage().load(agent_name) + return get_memory_storage().load(agent_name, user_id=user_id) -def reload_memory_data(agent_name: str | None = None) -> dict[str, Any]: +def reload_memory_data(agent_name: str | None = None, *, user_id: str | None = None) -> dict[str, Any]: """Reload memory data via storage provider.""" - return get_memory_storage().reload(agent_name) + return get_memory_storage().reload(agent_name, user_id=user_id) -def import_memory_data(memory_data: dict[str, Any], agent_name: str | None = None) -> dict[str, Any]: +def import_memory_data(memory_data: dict[str, Any], agent_name: str | None = None, *, user_id: str | None = None) -> dict[str, Any]: """Persist imported memory data via storage provider. Args: memory_data: Full memory payload to persist. agent_name: If provided, imports into per-agent memory. + user_id: If provided, scopes memory to a specific user. Returns: The saved memory data after storage normalization. @@ -67,15 +73,15 @@ def import_memory_data(memory_data: dict[str, Any], agent_name: str | None = Non OSError: If persisting the imported memory fails. 
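As a usage note for the keyword-only `user_id` threading above, here is a hedged sketch of how callers scope reads and writes. It assumes a configured, importable DeerFlow install; the ids are illustrative.

```python
# Usage sketch: user_id is keyword-only everywhere, so call sites cannot
# accidentally pass it positionally where agent_name is expected.
from deerflow.agents.memory.updater import create_memory_fact, get_memory_data

USER = "user-123"  # illustrative; in the app this comes from get_effective_user_id()

global_memory = get_memory_data(user_id=USER)                # global memory, scoped per user
agent_memory = get_memory_data("researcher", user_id=USER)   # per-agent memory for the same user

create_memory_fact(
    "Prefers concise answers",
    category="preference",
    confidence=0.8,
    user_id=USER,
)
```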
""" storage = get_memory_storage() - if not storage.save(memory_data, agent_name): + if not storage.save(memory_data, agent_name, user_id=user_id): raise OSError("Failed to save imported memory data") - return storage.load(agent_name) + return storage.load(agent_name, user_id=user_id) -def clear_memory_data(agent_name: str | None = None) -> dict[str, Any]: +def clear_memory_data(agent_name: str | None = None, *, user_id: str | None = None) -> dict[str, Any]: """Clear all stored memory data and persist an empty structure.""" cleared_memory = create_empty_memory() - if not _save_memory_to_file(cleared_memory, agent_name): + if not _save_memory_to_file(cleared_memory, agent_name, user_id=user_id): raise OSError("Failed to save cleared memory data") return cleared_memory @@ -92,6 +98,8 @@ def create_memory_fact( category: str = "context", confidence: float = 0.5, agent_name: str | None = None, + *, + user_id: str | None = None, ) -> dict[str, Any]: """Create a new fact and persist the updated memory data.""" normalized_content = content.strip() @@ -101,7 +109,7 @@ def create_memory_fact( normalized_category = category.strip() or "context" validated_confidence = _validate_confidence(confidence) now = utc_now_iso_z() - memory_data = get_memory_data(agent_name) + memory_data = get_memory_data(agent_name, user_id=user_id) updated_memory = dict(memory_data) facts = list(memory_data.get("facts", [])) facts.append( @@ -116,15 +124,15 @@ def create_memory_fact( ) updated_memory["facts"] = facts - if not _save_memory_to_file(updated_memory, agent_name): + if not _save_memory_to_file(updated_memory, agent_name, user_id=user_id): raise OSError("Failed to save memory data after creating fact") return updated_memory -def delete_memory_fact(fact_id: str, agent_name: str | None = None) -> dict[str, Any]: +def delete_memory_fact(fact_id: str, agent_name: str | None = None, *, user_id: str | None = None) -> dict[str, Any]: """Delete a fact by its id and persist the updated memory data.""" - memory_data = get_memory_data(agent_name) + memory_data = get_memory_data(agent_name, user_id=user_id) facts = memory_data.get("facts", []) updated_facts = [fact for fact in facts if fact.get("id") != fact_id] if len(updated_facts) == len(facts): @@ -133,7 +141,7 @@ def delete_memory_fact(fact_id: str, agent_name: str | None = None) -> dict[str, updated_memory = dict(memory_data) updated_memory["facts"] = updated_facts - if not _save_memory_to_file(updated_memory, agent_name): + if not _save_memory_to_file(updated_memory, agent_name, user_id=user_id): raise OSError(f"Failed to save memory data after deleting fact '{fact_id}'") return updated_memory @@ -145,9 +153,11 @@ def update_memory_fact( category: str | None = None, confidence: float | None = None, agent_name: str | None = None, + *, + user_id: str | None = None, ) -> dict[str, Any]: """Update an existing fact and persist the updated memory data.""" - memory_data = get_memory_data(agent_name) + memory_data = get_memory_data(agent_name, user_id=user_id) updated_memory = dict(memory_data) updated_facts: list[dict[str, Any]] = [] found = False @@ -174,7 +184,7 @@ def update_memory_fact( updated_memory["facts"] = updated_facts - if not _save_memory_to_file(updated_memory, agent_name): + if not _save_memory_to_file(updated_memory, agent_name, user_id=user_id): raise OSError(f"Failed to save memory data after updating fact '{fact_id}'") return updated_memory @@ -217,39 +227,6 @@ def _extract_text(content: Any) -> str: return str(content) -def _run_async_update_sync(coro: 
Awaitable[bool]) -> bool: - """Run an async memory update from sync code, including nested-loop contexts.""" - handed_off = False - - try: - try: - loop = asyncio.get_running_loop() - except RuntimeError: - loop = None - - if loop is not None and loop.is_running(): - future = _SYNC_MEMORY_UPDATER_EXECUTOR.submit(asyncio.run, coro) - handed_off = True - return future.result() - - handed_off = True - return asyncio.run(coro) - except Exception: - if not handed_off: - close = getattr(coro, "close", None) - if callable(close): - try: - close() - except Exception: - logger.debug( - "Failed to close un-awaited memory update coroutine", - exc_info=True, - ) - - logger.exception("Failed to run async memory update from sync context") - return False - - # Matches sentences that describe a file-upload *event* rather than general # file-related work. Deliberately narrow to avoid removing legitimate facts # such as "User works with CSV files" or "prefers PDF export". @@ -344,13 +321,14 @@ class MemoryUpdater: agent_name: str | None, correction_detected: bool, reinforcement_detected: bool, + user_id: str | None = None, ) -> tuple[dict[str, Any], str] | None: """Load memory and build the update prompt for a conversation.""" config = get_memory_config() if not config.enabled or not messages: return None - current_memory = get_memory_data(agent_name) + current_memory = get_memory_data(agent_name, user_id=user_id) conversation_text = format_conversation_for_update(messages) if not conversation_text.strip(): return None @@ -372,6 +350,7 @@ class MemoryUpdater: response_content: Any, thread_id: str | None, agent_name: str | None, + user_id: str | None = None, ) -> bool: """Parse the model response, apply updates, and persist memory.""" response_text = _extract_text(response_content).strip() @@ -385,7 +364,7 @@ class MemoryUpdater: # cannot corrupt the still-cached original object reference. updated_memory = self._apply_updates(copy.deepcopy(current_memory), update_data, thread_id) updated_memory = _strip_upload_mentions_from_memory(updated_memory) - return get_memory_storage().save(updated_memory, agent_name) + return get_memory_storage().save(updated_memory, agent_name, user_id=user_id) async def aupdate_memory( self, @@ -394,28 +373,63 @@ class MemoryUpdater: agent_name: str | None = None, correction_detected: bool = False, reinforcement_detected: bool = False, + user_id: str | None = None, ) -> bool: - """Update memory asynchronously based on conversation messages.""" + """Update memory asynchronously by delegating to the sync path. + + Uses ``asyncio.to_thread`` to run the *sync* ``model.invoke()`` path + in a worker thread so no second event loop is created and the + langchain async httpx client pool (shared with the lead agent) is + never touched. This eliminates the cross-loop connection-reuse bug + described in issue #2615. + """ + return await asyncio.to_thread( + self._do_update_memory_sync, + messages=messages, + thread_id=thread_id, + agent_name=agent_name, + correction_detected=correction_detected, + reinforcement_detected=reinforcement_detected, + user_id=user_id, + ) + + def _do_update_memory_sync( + self, + messages: list[Any], + thread_id: str | None = None, + agent_name: str | None = None, + correction_detected: bool = False, + reinforcement_detected: bool = False, + user_id: str | None = None, + ) -> bool: + """Pure-sync memory update using ``model.invoke()``. + + Uses the *sync* LLM call path so no event loop is created. 
This + guarantees that the langchain provider's globally cached async + httpx ``AsyncClient`` / connection pool (the one shared with the + lead agent) is never touched — no cross-loop connection reuse is + possible. + """ try: - prepared = await asyncio.to_thread( - self._prepare_update_prompt, + prepared = self._prepare_update_prompt( messages=messages, agent_name=agent_name, correction_detected=correction_detected, reinforcement_detected=reinforcement_detected, + user_id=user_id, ) if prepared is None: return False current_memory, prompt = prepared model = self._get_model() - response = await model.ainvoke(prompt) - return await asyncio.to_thread( - self._finalize_update, + response = model.invoke(prompt, config={"run_name": "memory_agent"}) + return self._finalize_update( current_memory=current_memory, response_content=response.content, thread_id=thread_id, agent_name=agent_name, + user_id=user_id, ) except json.JSONDecodeError as e: logger.warning("Failed to parse LLM response for memory update: %s", e) @@ -431,8 +445,18 @@ class MemoryUpdater: agent_name: str | None = None, correction_detected: bool = False, reinforcement_detected: bool = False, + user_id: str | None = None, ) -> bool: - """Synchronously update memory via the async updater path. + """Synchronously update memory using the sync LLM path. + + Uses ``model.invoke()`` (sync HTTP) which operates on a completely + separate connection pool from the async ``AsyncClient`` shared by + the lead agent. This eliminates the cross-loop connection-reuse + bug described in issue #2615. + + When called from within a running event loop (e.g. from a LangGraph + node), the blocking sync call is offloaded to a thread pool so the + caller's loop is not blocked. Args: messages: List of conversation messages. @@ -440,18 +464,39 @@ class MemoryUpdater: agent_name: If provided, updates per-agent memory. If None, updates global memory. correction_detected: Whether recent turns include an explicit correction signal. reinforcement_detected: Whether recent turns include a positive reinforcement signal. + user_id: If provided, scopes memory to a specific user. Returns: True if update was successful, False otherwise. """ - return _run_async_update_sync( - self.aupdate_memory( - messages=messages, - thread_id=thread_id, - agent_name=agent_name, - correction_detected=correction_detected, - reinforcement_detected=reinforcement_detected, - ) + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = None + + if loop is not None and loop.is_running(): + try: + future = _SYNC_MEMORY_UPDATER_EXECUTOR.submit( + self._do_update_memory_sync, + messages=messages, + thread_id=thread_id, + agent_name=agent_name, + correction_detected=correction_detected, + reinforcement_detected=reinforcement_detected, + user_id=user_id, + ) + return future.result() + except Exception: + logger.exception("Failed to offload memory update to executor") + return False + + return self._do_update_memory_sync( + messages=messages, + thread_id=thread_id, + agent_name=agent_name, + correction_detected=correction_detected, + reinforcement_detected=reinforcement_detected, + user_id=user_id, ) def _apply_updates( @@ -547,6 +592,7 @@ def update_memory_from_conversation( agent_name: str | None = None, correction_detected: bool = False, reinforcement_detected: bool = False, + user_id: str | None = None, ) -> bool: """Convenience function to update memory from a conversation. 
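A short usage sketch of this convenience wrapper with the new scoping parameter follows; the message contents and ids are illustrative, and a configured model is assumed since the call runs an LLM update.

```python
# Usage sketch: update_memory_from_conversation forwards user_id as a
# keyword-only argument to MemoryUpdater.update_memory.
from langchain_core.messages import AIMessage, HumanMessage

from deerflow.agents.memory.updater import update_memory_from_conversation

ok = update_memory_from_conversation(
    messages=[
        HumanMessage(content="Please keep answers short."),
        AIMessage(content="Understood. I will keep answers short."),
    ],
    thread_id="thread-42",
    agent_name=None,  # None targets global (per-user) memory
    user_id="user-123",
)
print(ok)
```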
@@ -556,9 +602,10 @@ def update_memory_from_conversation( agent_name: If provided, updates per-agent memory. If None, updates global memory. correction_detected: Whether recent turns include an explicit correction signal. reinforcement_detected: Whether recent turns include a positive reinforcement signal. + user_id: If provided, scopes memory to a specific user. Returns: True if successful, False otherwise. """ updater = MemoryUpdater() - return updater.update_memory(messages, thread_id, agent_name, correction_detected, reinforcement_detected) + return updater.update_memory(messages, thread_id, agent_name, correction_detected, reinforcement_detected, user_id=user_id) diff --git a/backend/packages/harness/deerflow/agents/middlewares/deferred_tool_filter_middleware.py b/backend/packages/harness/deerflow/agents/middlewares/deferred_tool_filter_middleware.py index 604cdf37c..f92d90158 100644 --- a/backend/packages/harness/deerflow/agents/middlewares/deferred_tool_filter_middleware.py +++ b/backend/packages/harness/deerflow/agents/middlewares/deferred_tool_filter_middleware.py @@ -16,6 +16,9 @@ from typing import override from langchain.agents import AgentState from langchain.agents.middleware import AgentMiddleware from langchain.agents.middleware.types import ModelCallResult, ModelRequest, ModelResponse +from langchain_core.messages import ToolMessage +from langgraph.prebuilt.tool_node import ToolCallRequest +from langgraph.types import Command logger = logging.getLogger(__name__) @@ -35,7 +38,7 @@ class DeferredToolFilterMiddleware(AgentMiddleware[AgentState]): if not registry: return request - deferred_names = {e.name for e in registry.entries} + deferred_names = registry.deferred_names active_tools = [t for t in request.tools if getattr(t, "name", None) not in deferred_names] if len(active_tools) < len(request.tools): @@ -43,6 +46,28 @@ class DeferredToolFilterMiddleware(AgentMiddleware[AgentState]): return request.override(tools=active_tools) + def _blocked_tool_message(self, request: ToolCallRequest) -> ToolMessage | None: + from deerflow.tools.builtins.tool_search import get_deferred_registry + + registry = get_deferred_registry() + if not registry: + return None + + tool_name = str(request.tool_call.get("name") or "") + if not tool_name: + return None + + if not registry.contains(tool_name): + return None + + tool_call_id = str(request.tool_call.get("id") or "missing_tool_call_id") + return ToolMessage( + content=(f"Error: Tool '{tool_name}' is deferred and has not been promoted yet. 
Call tool_search first to expose and promote this tool's schema, then retry."), + tool_call_id=tool_call_id, + name=tool_name, + status="error", + ) + @override def wrap_model_call( self, @@ -51,6 +76,17 @@ class DeferredToolFilterMiddleware(AgentMiddleware[AgentState]): ) -> ModelCallResult: return handler(self._filter_tools(request)) + @override + def wrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], ToolMessage | Command], + ) -> ToolMessage | Command: + blocked = self._blocked_tool_message(request) + if blocked is not None: + return blocked + return handler(request) + @override async def awrap_model_call( self, @@ -58,3 +94,14 @@ class DeferredToolFilterMiddleware(AgentMiddleware[AgentState]): handler: Callable[[ModelRequest], Awaitable[ModelResponse]], ) -> ModelCallResult: return await handler(self._filter_tools(request)) + + @override + async def awrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]], + ) -> ToolMessage | Command: + blocked = self._blocked_tool_message(request) + if blocked is not None: + return blocked + return await handler(request) diff --git a/backend/packages/harness/deerflow/agents/middlewares/llm_error_handling_middleware.py b/backend/packages/harness/deerflow/agents/middlewares/llm_error_handling_middleware.py index 4ef9f5e7d..ef23e08f1 100644 --- a/backend/packages/harness/deerflow/agents/middlewares/llm_error_handling_middleware.py +++ b/backend/packages/harness/deerflow/agents/middlewares/llm_error_handling_middleware.py @@ -20,7 +20,7 @@ from langchain.agents.middleware.types import ( from langchain_core.messages import AIMessage from langgraph.errors import GraphBubbleUp -from deerflow.config import get_app_config +from deerflow.config.app_config import AppConfig logger = logging.getLogger(__name__) @@ -70,20 +70,11 @@ class LLMErrorHandlingMiddleware(AgentMiddleware[AgentState]): retry_base_delay_ms: int = 1000 retry_cap_delay_ms: int = 8000 - circuit_failure_threshold: int = 5 - circuit_recovery_timeout_sec: int = 60 - - def __init__(self, **kwargs: Any) -> None: + def __init__(self, *, app_config: AppConfig, **kwargs: Any) -> None: super().__init__(**kwargs) - # Load Circuit Breaker configs from app config if available, fall back to defaults - try: - app_config = get_app_config() - self.circuit_failure_threshold = app_config.circuit_breaker.failure_threshold - self.circuit_recovery_timeout_sec = app_config.circuit_breaker.recovery_timeout_sec - except (FileNotFoundError, RuntimeError): - # Gracefully fall back to class defaults in test environments - pass + self.circuit_failure_threshold = app_config.circuit_breaker.failure_threshold + self.circuit_recovery_timeout_sec = app_config.circuit_breaker.recovery_timeout_sec # Circuit Breaker state self._circuit_lock = threading.Lock() diff --git a/backend/packages/harness/deerflow/agents/middlewares/loop_detection_middleware.py b/backend/packages/harness/deerflow/agents/middlewares/loop_detection_middleware.py index 4c1ba28ec..36054876b 100644 --- a/backend/packages/harness/deerflow/agents/middlewares/loop_detection_middleware.py +++ b/backend/packages/harness/deerflow/agents/middlewares/loop_detection_middleware.py @@ -362,7 +362,7 @@ class LoopDetectionMiddleware(AgentMiddleware[AgentState]): # the conversation; injecting one mid-conversation crashes # langchain_anthropic's _format_messages(). HumanMessage works # with all providers. See #1299. 
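The name-tagged HumanMessage pattern used here (and by the summarization middleware's "summary" message) generalizes. A minimal sketch: the injection line matches the diff, while the filtering helper is an assumption about how a consumer might use the `name` field, not code from this diff.

```python
# Sketch: inject a provider-safe HumanMessage (a mid-conversation
# SystemMessage breaks langchain_anthropic) and let consumers filter
# internally injected messages by their `name` field.
from langchain_core.messages import AnyMessage, HumanMessage

warning = HumanMessage(
    content="Loop detected: the same tool call keeps repeating. Change approach.",
    name="loop_warning",
)


def visible_messages(messages: list[AnyMessage]) -> list[AnyMessage]:
    # Hypothetical frontend-side filter: the model still sees these
    # messages in history, but the UI hides them.
    hidden_names = {"loop_warning", "summary"}
    return [m for m in messages if getattr(m, "name", None) not in hidden_names]


print(len(visible_messages([warning])))  # 0
```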
- return {"messages": [HumanMessage(content=warning)]} + return {"messages": [HumanMessage(content=warning, name="loop_warning")]} return None diff --git a/backend/packages/harness/deerflow/agents/middlewares/memory_middleware.py b/backend/packages/harness/deerflow/agents/middlewares/memory_middleware.py index f1dccf689..059f8ffc2 100644 --- a/backend/packages/harness/deerflow/agents/middlewares/memory_middleware.py +++ b/backend/packages/harness/deerflow/agents/middlewares/memory_middleware.py @@ -11,6 +11,7 @@ from langgraph.runtime import Runtime from deerflow.agents.memory.message_processing import detect_correction, detect_reinforcement, filter_messages_for_memory from deerflow.agents.memory.queue import get_memory_queue from deerflow.config.memory_config import get_memory_config +from deerflow.runtime.user_context import get_effective_user_id logger = logging.getLogger(__name__) @@ -86,11 +87,16 @@ class MemoryMiddleware(AgentMiddleware[MemoryMiddlewareState]): # Queue the filtered conversation for memory update correction_detected = detect_correction(filtered_messages) reinforcement_detected = not correction_detected and detect_reinforcement(filtered_messages) + # Capture user_id at enqueue time while the request context is still alive. + # threading.Timer fires on a different thread where ContextVar values are not + # propagated, so we must store user_id explicitly in ConversationContext. + user_id = get_effective_user_id() queue = get_memory_queue() queue.add( thread_id=thread_id, messages=filtered_messages, agent_name=self._agent_name, + user_id=user_id, correction_detected=correction_detected, reinforcement_detected=reinforcement_detected, ) diff --git a/backend/packages/harness/deerflow/agents/middlewares/summarization_middleware.py b/backend/packages/harness/deerflow/agents/middlewares/summarization_middleware.py index fba44c215..9f2c1a055 100644 --- a/backend/packages/harness/deerflow/agents/middlewares/summarization_middleware.py +++ b/backend/packages/harness/deerflow/agents/middlewares/summarization_middleware.py @@ -3,12 +3,13 @@ from __future__ import annotations import logging +from collections.abc import Collection from dataclasses import dataclass -from typing import Protocol, runtime_checkable +from typing import Any, Protocol, override, runtime_checkable from langchain.agents import AgentState from langchain.agents.middleware import SummarizationMiddleware -from langchain_core.messages import AnyMessage, RemoveMessage +from langchain_core.messages import AIMessage, AnyMessage, HumanMessage, RemoveMessage, ToolMessage from langgraph.config import get_config from langgraph.graph.message import REMOVE_ALL_MESSAGES from langgraph.runtime import Runtime @@ -58,17 +59,63 @@ def _resolve_agent_name(runtime: Runtime) -> str | None: return agent_name +def _tool_call_path(tool_call: dict[str, Any]) -> str | None: + """Best-effort extraction of a file path argument from a read_file-like tool call.""" + args = tool_call.get("args") or {} + if not isinstance(args, dict): + return None + for key in ("path", "file_path", "filepath"): + value = args.get(key) + if isinstance(value, str) and value: + return value + return None + + +def _clone_ai_message( + message: AIMessage, + tool_calls: list[dict[str, Any]], + *, + content: Any | None = None, +) -> AIMessage: + """Clone an AIMessage while replacing its tool_calls list and optional content.""" + update: dict[str, Any] = {"tool_calls": tool_calls} + if content is not None: + update["content"] = content + return 
message.model_copy(update=update) + + +@dataclass +class _SkillBundle: + """Skill-related tool calls and tool results associated with one AIMessage.""" + + ai_index: int + skill_tool_indices: tuple[int, ...] + skill_tool_call_ids: frozenset[str] + skill_tool_tokens: int + skill_key: str + + class DeerFlowSummarizationMiddleware(SummarizationMiddleware): - """Summarization middleware with pre-compression hook dispatch.""" + """Summarization middleware with pre-compression hook dispatch and skill rescue.""" def __init__( self, *args, + skills_container_path: str | None = None, + skill_file_read_tool_names: Collection[str] | None = None, before_summarization: list[BeforeSummarizationHook] | None = None, + preserve_recent_skill_count: int = 5, + preserve_recent_skill_tokens: int = 25_000, + preserve_recent_skill_tokens_per_skill: int = 5_000, **kwargs, ) -> None: super().__init__(*args, **kwargs) + self._skills_container_path = skills_container_path or "/mnt/skills" + self._skill_file_read_tool_names = frozenset(skill_file_read_tool_names or {"read_file", "read", "view", "cat"}) self._before_summarization_hooks = before_summarization or [] + self._preserve_recent_skill_count = max(0, preserve_recent_skill_count) + self._preserve_recent_skill_tokens = max(0, preserve_recent_skill_tokens) + self._preserve_recent_skill_tokens_per_skill = max(0, preserve_recent_skill_tokens_per_skill) def before_model(self, state: AgentState, runtime: Runtime) -> dict | None: return self._maybe_summarize(state, runtime) @@ -88,7 +135,7 @@ class DeerFlowSummarizationMiddleware(SummarizationMiddleware): if cutoff_index <= 0: return None - messages_to_summarize, preserved_messages = self._partition_messages(messages, cutoff_index) + messages_to_summarize, preserved_messages = self._partition_with_skill_rescue(messages, cutoff_index) self._fire_hooks(messages_to_summarize, preserved_messages, runtime) summary = self._create_summary(messages_to_summarize) new_messages = self._build_new_messages(summary) @@ -113,7 +160,7 @@ class DeerFlowSummarizationMiddleware(SummarizationMiddleware): if cutoff_index <= 0: return None - messages_to_summarize, preserved_messages = self._partition_messages(messages, cutoff_index) + messages_to_summarize, preserved_messages = self._partition_with_skill_rescue(messages, cutoff_index) self._fire_hooks(messages_to_summarize, preserved_messages, runtime) summary = await self._acreate_summary(messages_to_summarize) new_messages = self._build_new_messages(summary) @@ -126,6 +173,162 @@ class DeerFlowSummarizationMiddleware(SummarizationMiddleware): ] } + @override + def _build_new_messages(self, summary: str) -> list[HumanMessage]: + """Override the base implementation to tag the injected summary message with the special name 'summary'. + Messages carrying this name are hidden in the frontend but are still passed to the model as context.
+ """ + return [HumanMessage(content=f"Here is a summary of the conversation to date:\n\n{summary}", name="summary")] + + def _partition_with_skill_rescue( + self, + messages: list[AnyMessage], + cutoff_index: int, + ) -> tuple[list[AnyMessage], list[AnyMessage]]: + """Partition like the parent, then rescue recently-loaded skill bundles.""" + to_summarize, preserved = self._partition_messages(messages, cutoff_index) + + if self._preserve_recent_skill_count == 0 or self._preserve_recent_skill_tokens == 0 or not to_summarize: + return to_summarize, preserved + + try: + bundles = self._find_skill_bundles(to_summarize, self._skills_container_path) + except Exception: + logger.exception("Skill-preserving summarization rescue failed; falling back to default partition") + return to_summarize, preserved + + if not bundles: + return to_summarize, preserved + + rescue_bundles = self._select_bundles_to_rescue(bundles) + if not rescue_bundles: + return to_summarize, preserved + + bundles_by_ai_index = {bundle.ai_index: bundle for bundle in rescue_bundles} + rescue_tool_indices = {idx for bundle in rescue_bundles for idx in bundle.skill_tool_indices} + rescued: list[AnyMessage] = [] + remaining: list[AnyMessage] = [] + for i, msg in enumerate(to_summarize): + bundle = bundles_by_ai_index.get(i) + if bundle is not None and isinstance(msg, AIMessage): + rescued_tool_calls = [tc for tc in msg.tool_calls if tc.get("id") in bundle.skill_tool_call_ids] + remaining_tool_calls = [tc for tc in msg.tool_calls if tc.get("id") not in bundle.skill_tool_call_ids] + + if rescued_tool_calls: + rescued.append(_clone_ai_message(msg, rescued_tool_calls, content="")) + if remaining_tool_calls or msg.content: + remaining.append(_clone_ai_message(msg, remaining_tool_calls)) + continue + + if i in rescue_tool_indices: + rescued.append(msg) + continue + + remaining.append(msg) + + return remaining, rescued + preserved + + def _find_skill_bundles( + self, + messages: list[AnyMessage], + skills_root: str, + ) -> list[_SkillBundle]: + """Locate AIMessage + paired ToolMessage groups that load skill files.""" + bundles: list[_SkillBundle] = [] + n = len(messages) + i = 0 + while i < n: + msg = messages[i] + if not (isinstance(msg, AIMessage) and msg.tool_calls): + i += 1 + continue + + tool_calls = list(msg.tool_calls) + skill_paths_by_id: dict[str, str] = {} + for tc in tool_calls: + if self._is_skill_tool_call(tc, skills_root): + tc_id = tc.get("id") + path = _tool_call_path(tc) + if tc_id and path: + skill_paths_by_id[tc_id] = path + + if not skill_paths_by_id: + i += 1 + continue + + skill_tool_tokens = 0 + skill_key_parts: list[str] = [] + skill_tool_indices: list[int] = [] + matched_skill_call_ids: set[str] = set() + + j = i + 1 + while j < n and isinstance(messages[j], ToolMessage): + j += 1 + + for k in range(i + 1, j): + tool_msg = messages[k] + if isinstance(tool_msg, ToolMessage) and tool_msg.tool_call_id in skill_paths_by_id: + skill_tool_tokens += self.token_counter([tool_msg]) + skill_key_parts.append(skill_paths_by_id[tool_msg.tool_call_id]) + skill_tool_indices.append(k) + matched_skill_call_ids.add(tool_msg.tool_call_id) + + if not skill_tool_indices: + i = j + continue + + bundles.append( + _SkillBundle( + ai_index=i, + skill_tool_indices=tuple(skill_tool_indices), + skill_tool_call_ids=frozenset(matched_skill_call_ids), + skill_tool_tokens=skill_tool_tokens, + skill_key="|".join(sorted(skill_key_parts)), + ) + ) + i = j + + return bundles + + def _select_bundles_to_rescue(self, bundles: list[_SkillBundle]) -> 
list[_SkillBundle]: + """Pick bundles to keep, walking newest-first under count/token budgets.""" + selected: list[_SkillBundle] = [] + if not bundles: + return selected + + seen_skill_keys: set[str] = set() + total_tokens = 0 + kept = 0 + + for bundle in reversed(bundles): + if kept >= self._preserve_recent_skill_count: + break + if bundle.skill_key in seen_skill_keys: + continue + if bundle.skill_tool_tokens > self._preserve_recent_skill_tokens_per_skill: + continue + if total_tokens + bundle.skill_tool_tokens > self._preserve_recent_skill_tokens: + continue + + selected.append(bundle) + total_tokens += bundle.skill_tool_tokens + kept += 1 + seen_skill_keys.add(bundle.skill_key) + + selected.reverse() + return selected + + def _is_skill_tool_call(self, tool_call: dict[str, Any], skills_root: str) -> bool: + """Return True when ``tool_call`` reads a file under the configured skills root.""" + name = tool_call.get("name") or "" + if name not in self._skill_file_read_tool_names: + return False + path = _tool_call_path(tool_call) + if not path: + return False + normalized_root = skills_root.rstrip("/") + return path == normalized_root or path.startswith(normalized_root + "/") + def _fire_hooks( self, messages_to_summarize: list[AnyMessage], diff --git a/backend/packages/harness/deerflow/agents/middlewares/thread_data_middleware.py b/backend/packages/harness/deerflow/agents/middlewares/thread_data_middleware.py index c25531e02..8d93de4ff 100644 --- a/backend/packages/harness/deerflow/agents/middlewares/thread_data_middleware.py +++ b/backend/packages/harness/deerflow/agents/middlewares/thread_data_middleware.py @@ -1,13 +1,16 @@ import logging +from datetime import UTC, datetime from typing import NotRequired, override from langchain.agents import AgentState from langchain.agents.middleware import AgentMiddleware +from langchain_core.messages import HumanMessage from langgraph.config import get_config from langgraph.runtime import Runtime from deerflow.agents.thread_state import ThreadDataState from deerflow.config.paths import Paths, get_paths +from deerflow.runtime.user_context import get_effective_user_id logger = logging.getLogger(__name__) @@ -46,32 +49,34 @@ class ThreadDataMiddleware(AgentMiddleware[ThreadDataMiddlewareState]): self._paths = Paths(base_dir) if base_dir else get_paths() self._lazy_init = lazy_init - def _get_thread_paths(self, thread_id: str) -> dict[str, str]: + def _get_thread_paths(self, thread_id: str, user_id: str | None = None) -> dict[str, str]: """Get the paths for a thread's data directories. Args: thread_id: The thread ID. + user_id: Optional user ID for per-user path isolation. Returns: Dictionary with workspace_path, uploads_path, and outputs_path. """ return { - "workspace_path": str(self._paths.sandbox_work_dir(thread_id)), - "uploads_path": str(self._paths.sandbox_uploads_dir(thread_id)), - "outputs_path": str(self._paths.sandbox_outputs_dir(thread_id)), + "workspace_path": str(self._paths.sandbox_work_dir(thread_id, user_id=user_id)), + "uploads_path": str(self._paths.sandbox_uploads_dir(thread_id, user_id=user_id)), + "outputs_path": str(self._paths.sandbox_outputs_dir(thread_id, user_id=user_id)), } - def _create_thread_directories(self, thread_id: str) -> dict[str, str]: + def _create_thread_directories(self, thread_id: str, user_id: str | None = None) -> dict[str, str]: """Create the thread data directories. Args: thread_id: The thread ID. + user_id: Optional user ID for per-user path isolation. Returns: Dictionary with the created directory paths. 
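The user-scoped directory layout described in this docstring can be illustrated with a small sketch; the concrete paths are whatever `get_paths()` resolves on the host, and the thread and user ids are illustrative.

```python
# Sketch: the same thread id maps to different sandbox directories per
# user, so one user's workspace, uploads, and outputs are never visible
# to another user.
from deerflow.config.paths import get_paths

paths = get_paths()
thread_id = "thread-42"

legacy_ws = paths.sandbox_work_dir(thread_id)               # pre-existing, unscoped layout
ws_a = paths.sandbox_work_dir(thread_id, user_id="user-a")  # user-scoped
ws_b = paths.sandbox_work_dir(thread_id, user_id="user-b")

print(legacy_ws, ws_a, ws_b, sep="\n")  # expect three distinct paths
```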
""" - self._paths.ensure_thread_dirs(thread_id) - return self._get_thread_paths(thread_id) + self._paths.ensure_thread_dirs(thread_id, user_id=user_id) + return self._get_thread_paths(thread_id, user_id=user_id) @override def before_agent(self, state: ThreadDataMiddlewareState, runtime: Runtime) -> dict | None: @@ -84,16 +89,30 @@ class ThreadDataMiddleware(AgentMiddleware[ThreadDataMiddlewareState]): if thread_id is None: raise ValueError("Thread ID is required in runtime context or config.configurable") + user_id = get_effective_user_id() + if self._lazy_init: # Lazy initialization: only compute paths, don't create directories - paths = self._get_thread_paths(thread_id) + paths = self._get_thread_paths(thread_id, user_id=user_id) else: # Eager initialization: create directories immediately - paths = self._create_thread_directories(thread_id) + paths = self._create_thread_directories(thread_id, user_id=user_id) logger.debug("Created thread data directories for thread %s", thread_id) + messages = list(state.get("messages", [])) + last_message = messages[-1] if messages else None + + if last_message and isinstance(last_message, HumanMessage): + messages[-1] = HumanMessage( + content=last_message.content, + id=last_message.id, + name=last_message.name or "user-input", + additional_kwargs={**last_message.additional_kwargs, "run_id": runtime.context.get("run_id"), "timestamp": datetime.now(UTC).isoformat()}, + ) + return { "thread_data": { **paths, - } + }, + "messages": messages, } diff --git a/backend/packages/harness/deerflow/agents/middlewares/title_middleware.py b/backend/packages/harness/deerflow/agents/middlewares/title_middleware.py index dd131ac28..5cd5bb46c 100644 --- a/backend/packages/harness/deerflow/agents/middlewares/title_middleware.py +++ b/backend/packages/harness/deerflow/agents/middlewares/title_middleware.py @@ -2,10 +2,11 @@ import logging import re -from typing import NotRequired, override +from typing import Any, NotRequired, override from langchain.agents import AgentState from langchain.agents.middleware import AgentMiddleware +from langgraph.config import get_config from langgraph.runtime import Runtime from deerflow.config.title_config import get_title_config @@ -106,6 +107,21 @@ class TitleMiddleware(AgentMiddleware[TitleMiddlewareState]): return user_msg[:fallback_chars].rstrip() + "..." return user_msg if user_msg else "New Conversation" + def _get_runnable_config(self) -> dict[str, Any]: + """Inherit the parent RunnableConfig and add middleware tag. + + This ensures RunJournal identifies LLM calls from this middleware + as ``middleware:title`` instead of ``lead_agent``. 
+ """ + try: + parent = get_config() + except Exception: + parent = {} + config = {**parent} + config["run_name"] = "title_agent" + config["tags"] = [*(config.get("tags") or []), "middleware:title"] + return config + def _generate_title_result(self, state: TitleMiddlewareState) -> dict | None: """Generate a local fallback title without blocking on an LLM call.""" if not self._should_generate_title(state): @@ -127,7 +143,7 @@ class TitleMiddleware(AgentMiddleware[TitleMiddlewareState]): model = create_chat_model(name=config.model_name, thinking_enabled=False) else: model = create_chat_model(thinking_enabled=False) - response = await model.ainvoke(prompt) + response = await model.ainvoke(prompt, config=self._get_runnable_config()) title = self._parse_title(response.content) if title: return {"title": title} diff --git a/backend/packages/harness/deerflow/agents/middlewares/tool_error_handling_middleware.py b/backend/packages/harness/deerflow/agents/middlewares/tool_error_handling_middleware.py index 52be28bfb..db0230cf9 100644 --- a/backend/packages/harness/deerflow/agents/middlewares/tool_error_handling_middleware.py +++ b/backend/packages/harness/deerflow/agents/middlewares/tool_error_handling_middleware.py @@ -11,6 +11,8 @@ from langgraph.errors import GraphBubbleUp from langgraph.prebuilt.tool_node import ToolCallRequest from langgraph.types import Command +from deerflow.config.app_config import AppConfig + logger = logging.getLogger(__name__) _MISSING_TOOL_CALL_ID = "missing_tool_call_id" @@ -67,6 +69,7 @@ class ToolErrorHandlingMiddleware(AgentMiddleware[AgentState]): def _build_runtime_middlewares( *, + app_config: AppConfig, include_uploads: bool, include_dangling_tool_call_patch: bool, lazy_init: bool = True, @@ -91,12 +94,10 @@ def _build_runtime_middlewares( middlewares.append(DanglingToolCallMiddleware()) - middlewares.append(LLMErrorHandlingMiddleware()) + middlewares.append(LLMErrorHandlingMiddleware(app_config=app_config)) # Guardrail middleware (if configured) - from deerflow.config.guardrails_config import get_guardrails_config - - guardrails_config = get_guardrails_config() + guardrails_config = app_config.guardrails if guardrails_config.enabled and guardrails_config.provider: import inspect @@ -125,18 +126,20 @@ def _build_runtime_middlewares( return middlewares -def build_lead_runtime_middlewares(*, lazy_init: bool = True) -> list[AgentMiddleware]: +def build_lead_runtime_middlewares(*, app_config: AppConfig, lazy_init: bool = True) -> list[AgentMiddleware]: """Middlewares shared by lead agent runtime before lead-only middlewares.""" return _build_runtime_middlewares( + app_config=app_config, include_uploads=True, include_dangling_tool_call_patch=True, lazy_init=lazy_init, ) -def build_subagent_runtime_middlewares(*, lazy_init: bool = True) -> list[AgentMiddleware]: +def build_subagent_runtime_middlewares(*, app_config: AppConfig, lazy_init: bool = True) -> list[AgentMiddleware]: """Middlewares shared by subagent runtime before subagent-only middlewares.""" return _build_runtime_middlewares( + app_config=app_config, include_uploads=False, include_dangling_tool_call_patch=True, lazy_init=lazy_init, diff --git a/backend/packages/harness/deerflow/agents/middlewares/uploads_middleware.py b/backend/packages/harness/deerflow/agents/middlewares/uploads_middleware.py index 0fb217bcc..5a9ee8301 100644 --- a/backend/packages/harness/deerflow/agents/middlewares/uploads_middleware.py +++ b/backend/packages/harness/deerflow/agents/middlewares/uploads_middleware.py @@ -10,6 +10,7 @@ 
from langchain_core.messages import HumanMessage from langgraph.runtime import Runtime from deerflow.config.paths import Paths, get_paths +from deerflow.runtime.user_context import get_effective_user_id from deerflow.utils.file_conversion import extract_outline logger = logging.getLogger(__name__) @@ -221,7 +222,7 @@ class UploadsMiddleware(AgentMiddleware[UploadsMiddlewareState]): thread_id = get_config().get("configurable", {}).get("thread_id") except RuntimeError: pass # get_config() raises outside a runnable context (e.g. unit tests) - uploads_dir = self._paths.sandbox_uploads_dir(thread_id) if thread_id else None + uploads_dir = self._paths.sandbox_uploads_dir(thread_id, user_id=get_effective_user_id()) if thread_id else None # Get newly uploaded files from the current message's additional_kwargs.files new_files = self._files_from_kwargs(last_message, uploads_dir) or [] @@ -282,6 +283,7 @@ class UploadsMiddleware(AgentMiddleware[UploadsMiddlewareState]): updated_message = HumanMessage( content=updated_content, id=last_message.id, + name=last_message.name, additional_kwargs=last_message.additional_kwargs, ) diff --git a/backend/packages/harness/deerflow/client.py b/backend/packages/harness/deerflow/client.py index a26d838af..2ba9302cc 100644 --- a/backend/packages/harness/deerflow/client.py +++ b/backend/packages/harness/deerflow/client.py @@ -40,7 +40,8 @@ from deerflow.config.app_config import get_app_config, reload_app_config from deerflow.config.extensions_config import ExtensionsConfig, SkillStateConfig, get_extensions_config, reload_extensions_config from deerflow.config.paths import get_paths from deerflow.models import create_chat_model -from deerflow.skills.installer import install_skill_from_archive +from deerflow.runtime.user_context import get_effective_user_id +from deerflow.skills.storage import get_or_new_skill_storage from deerflow.uploads.manager import ( claim_unique_filename, delete_file_safe, @@ -240,7 +241,7 @@ class DeerFlowClient: } checkpointer = self._checkpointer if checkpointer is None: - from deerflow.agents.checkpointer import get_checkpointer + from deerflow.runtime.checkpointer import get_checkpointer checkpointer = get_checkpointer() if checkpointer is not None: @@ -374,7 +375,7 @@ class DeerFlowClient: """ checkpointer = self._checkpointer if checkpointer is None: - from deerflow.agents.checkpointer.provider import get_checkpointer + from deerflow.runtime.checkpointer.provider import get_checkpointer checkpointer = get_checkpointer() @@ -429,7 +430,7 @@ class DeerFlowClient: """ checkpointer = self._checkpointer if checkpointer is None: - from deerflow.agents.checkpointer.provider import get_checkpointer + from deerflow.runtime.checkpointer.provider import get_checkpointer checkpointer = get_checkpointer() @@ -751,8 +752,6 @@ class DeerFlowClient: Dict with "skills" key containing list of skill info dicts, matching the Gateway API ``SkillsListResponse`` schema. 
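 
         Example return value (illustrative shape):
             {"skills": [{"name": "web-search", "category": "public",
                          "enabled": True, ...}]}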
""" - from deerflow.skills.loader import load_skills - return { "skills": [ { @@ -762,7 +761,7 @@ class DeerFlowClient: "category": s.category, "enabled": s.enabled, } - for s in load_skills(enabled_only=enabled_only) + for s in get_or_new_skill_storage().load_skills(enabled_only=enabled_only) ] } @@ -774,19 +773,19 @@ class DeerFlowClient: """ from deerflow.agents.memory.updater import get_memory_data - return get_memory_data() + return get_memory_data(user_id=get_effective_user_id()) def export_memory(self) -> dict: """Export current memory data for backup or transfer.""" from deerflow.agents.memory.updater import get_memory_data - return get_memory_data() + return get_memory_data(user_id=get_effective_user_id()) def import_memory(self, memory_data: dict) -> dict: """Import and persist full memory data.""" from deerflow.agents.memory.updater import import_memory_data - return import_memory_data(memory_data) + return import_memory_data(memory_data, user_id=get_effective_user_id()) def get_model(self, name: str) -> dict | None: """Get a specific model's configuration by name. @@ -871,9 +870,9 @@ class DeerFlowClient: Returns: Skill info dict, or None if not found. """ - from deerflow.skills.loader import load_skills + from deerflow.skills.storage import get_or_new_skill_storage - skill = next((s for s in load_skills(enabled_only=False) if s.name == name), None) + skill = next((s for s in get_or_new_skill_storage().load_skills(enabled_only=False) if s.name == name), None) if skill is None: return None return { @@ -898,9 +897,9 @@ class DeerFlowClient: ValueError: If the skill is not found. OSError: If the config file cannot be written. """ - from deerflow.skills.loader import load_skills + from deerflow.skills.storage import get_or_new_skill_storage - skills = load_skills(enabled_only=False) + skills = get_or_new_skill_storage().load_skills(enabled_only=False) skill = next((s for s in skills if s.name == name), None) if skill is None: raise ValueError(f"Skill '{name}' not found") @@ -923,7 +922,7 @@ class DeerFlowClient: self._agent_config_key = None reload_extensions_config() - updated = next((s for s in load_skills(enabled_only=False) if s.name == name), None) + updated = next((s for s in get_or_new_skill_storage().load_skills(enabled_only=False) if s.name == name), None) if updated is None: raise RuntimeError(f"Skill '{name}' disappeared after update") return { @@ -947,7 +946,7 @@ class DeerFlowClient: FileNotFoundError: If the file does not exist. ValueError: If the file is invalid. """ - return install_skill_from_archive(skill_path) + return get_or_new_skill_storage().install_skill_from_archive(skill_path) # ------------------------------------------------------------------ # Public API — memory management @@ -961,13 +960,13 @@ class DeerFlowClient: """ from deerflow.agents.memory.updater import reload_memory_data - return reload_memory_data() + return reload_memory_data(user_id=get_effective_user_id()) def clear_memory(self) -> dict: """Clear all persisted memory data.""" from deerflow.agents.memory.updater import clear_memory_data - return clear_memory_data() + return clear_memory_data(user_id=get_effective_user_id()) def create_memory_fact(self, content: str, category: str = "context", confidence: float = 0.5) -> dict: """Create a single fact manually.""" @@ -1184,7 +1183,7 @@ class DeerFlowClient: ValueError: If the path is invalid. 
""" try: - actual = get_paths().resolve_virtual_path(thread_id, path) + actual = get_paths().resolve_virtual_path(thread_id, path, user_id=get_effective_user_id()) except ValueError as exc: if "traversal" in str(exc): from deerflow.uploads.manager import PathTraversalError diff --git a/backend/packages/harness/deerflow/community/aio_sandbox/aio_sandbox_provider.py b/backend/packages/harness/deerflow/community/aio_sandbox/aio_sandbox_provider.py index 952b6731b..292a43758 100644 --- a/backend/packages/harness/deerflow/community/aio_sandbox/aio_sandbox_provider.py +++ b/backend/packages/harness/deerflow/community/aio_sandbox/aio_sandbox_provider.py @@ -27,6 +27,7 @@ except ImportError: # pragma: no cover - Windows fallback from deerflow.config import get_app_config from deerflow.config.paths import VIRTUAL_PATH_PREFIX, get_paths +from deerflow.runtime.user_context import get_effective_user_id from deerflow.sandbox.sandbox import Sandbox from deerflow.sandbox.sandbox_provider import SandboxProvider @@ -270,15 +271,16 @@ class AioSandboxProvider(SandboxProvider): mounted Docker socket (DooD), the host Docker daemon can resolve the paths. """ paths = get_paths() - paths.ensure_thread_dirs(thread_id) + user_id = get_effective_user_id() + paths.ensure_thread_dirs(thread_id, user_id=user_id) return [ - (paths.host_sandbox_work_dir(thread_id), f"{VIRTUAL_PATH_PREFIX}/workspace", False), - (paths.host_sandbox_uploads_dir(thread_id), f"{VIRTUAL_PATH_PREFIX}/uploads", False), - (paths.host_sandbox_outputs_dir(thread_id), f"{VIRTUAL_PATH_PREFIX}/outputs", False), + (paths.host_sandbox_work_dir(thread_id, user_id=user_id), f"{VIRTUAL_PATH_PREFIX}/workspace", False), + (paths.host_sandbox_uploads_dir(thread_id, user_id=user_id), f"{VIRTUAL_PATH_PREFIX}/uploads", False), + (paths.host_sandbox_outputs_dir(thread_id, user_id=user_id), f"{VIRTUAL_PATH_PREFIX}/outputs", False), # ACP workspace: read-only inside the sandbox (lead agent reads results; # the ACP subprocess writes from the host side, not from within the container). - (paths.host_acp_workspace_dir(thread_id), "/mnt/acp-workspace", True), + (paths.host_acp_workspace_dir(thread_id, user_id=user_id), "/mnt/acp-workspace", True), ] @staticmethod @@ -490,8 +492,9 @@ class AioSandboxProvider(SandboxProvider): across multiple processes, preventing container-name conflicts. 
""" paths = get_paths() - paths.ensure_thread_dirs(thread_id) - lock_path = paths.thread_dir(thread_id) / f"{sandbox_id}.lock" + user_id = get_effective_user_id() + paths.ensure_thread_dirs(thread_id, user_id=user_id) + lock_path = paths.thread_dir(thread_id, user_id=user_id) / f"{sandbox_id}.lock" with open(lock_path, "a", encoding="utf-8") as lock_file: locked = False diff --git a/backend/packages/harness/deerflow/community/aio_sandbox/local_backend.py b/backend/packages/harness/deerflow/community/aio_sandbox/local_backend.py index 4b680df2d..92d933d89 100644 --- a/backend/packages/harness/deerflow/community/aio_sandbox/local_backend.py +++ b/backend/packages/harness/deerflow/community/aio_sandbox/local_backend.py @@ -9,6 +9,7 @@ from __future__ import annotations import json import logging import os +import shlex import subprocess from datetime import datetime @@ -86,6 +87,88 @@ def _format_container_mount(runtime: str, host_path: str, container_path: str, r return ["-v", mount_spec] +def _redact_container_command_for_log(cmd: list[str]) -> list[str]: + """Return a Docker/Container command with environment values redacted.""" + redacted: list[str] = [] + redact_next_env = False + + for arg in cmd: + if redact_next_env: + if "=" in arg: + key = arg.split("=", 1)[0] + redacted.append(f"{key}=" if key else "") + else: + redacted.append(arg) + redact_next_env = False + continue + + if arg in {"-e", "--env"}: + redacted.append(arg) + redact_next_env = True + continue + + if arg.startswith("--env="): + value = arg.removeprefix("--env=") + if "=" in value: + key = value.split("=", 1)[0] + redacted.append(f"--env={key}=" if key else "--env=") + else: + redacted.append(arg) + continue + + redacted.append(arg) + + return redacted + + +def _format_container_command_for_log(cmd: list[str]) -> str: + if os.name == "nt": + return subprocess.list2cmdline(cmd) + return shlex.join(cmd) + + +def _normalize_sandbox_host(host: str) -> str: + return host.strip().lower() + + +def _is_ipv6_loopback_sandbox_host(host: str) -> bool: + return _normalize_sandbox_host(host) in {"::1", "[::1]"} + + +def _is_loopback_sandbox_host(host: str) -> bool: + return _normalize_sandbox_host(host) in {"", "localhost", "127.0.0.1", "::1", "[::1]"} + + +def _resolve_docker_bind_host(sandbox_host: str | None = None, bind_host: str | None = None) -> str: + """Choose the host interface for legacy Docker ``-p`` sandbox publishing. + + Bare-metal/local runs talk to sandboxes through localhost and should not + expose the sandbox HTTP API on every host interface. Docker-outside-of- + Docker deployments commonly use ``host.docker.internal`` from another + container; keep their legacy broad bind unless operators opt into a + narrower bind with ``DEER_FLOW_SANDBOX_BIND_HOST``. When operators choose + an IPv6 loopback sandbox host, bind Docker to IPv6 loopback as well so the + advertised sandbox URL and published socket use the same address family. 
+ """ + explicit_bind = bind_host if bind_host is not None else os.environ.get("DEER_FLOW_SANDBOX_BIND_HOST") + if explicit_bind is not None: + explicit_bind = explicit_bind.strip() + if explicit_bind: + logger.debug("Docker sandbox bind: %s (explicit bind host override)", explicit_bind) + return explicit_bind + + host = sandbox_host if sandbox_host is not None else os.environ.get("DEER_FLOW_SANDBOX_HOST", "localhost") + if _is_ipv6_loopback_sandbox_host(host): + logger.debug("Docker sandbox bind: [::1] (IPv6 loopback sandbox host)") + return "[::1]" + if _is_loopback_sandbox_host(host): + logger.debug("Docker sandbox bind: 127.0.0.1 (loopback default)") + return "127.0.0.1" + + logger.debug("Docker sandbox bind: 0.0.0.0 (non-loopback sandbox host compatibility)") + return "0.0.0.0" + + class LocalContainerBackend(SandboxBackend): """Backend that manages sandbox containers locally using Docker or Apple Container. @@ -424,12 +507,17 @@ class LocalContainerBackend(SandboxBackend): if self._runtime == "docker": cmd.extend(["--security-opt", "seccomp=unconfined"]) + if self._runtime == "docker": + port_mapping = f"{_resolve_docker_bind_host()}:{port}:8080" + else: + port_mapping = f"{port}:8080" + cmd.extend( [ "--rm", "-d", "-p", - f"{port}:8080", + port_mapping, "--name", container_name, ] @@ -464,7 +552,8 @@ class LocalContainerBackend(SandboxBackend): cmd.append(self._image) - logger.info(f"Starting container using {self._runtime}: {' '.join(cmd)}") + log_cmd = _format_container_command_for_log(_redact_container_command_for_log(cmd)) + logger.info(f"Starting container using {self._runtime}: {log_cmd}") try: result = subprocess.run(cmd, capture_output=True, text=True, check=True) diff --git a/backend/packages/harness/deerflow/community/jina_ai/jina_client.py b/backend/packages/harness/deerflow/community/jina_ai/jina_client.py index 3adc5458a..c4fc1ac81 100644 --- a/backend/packages/harness/deerflow/community/jina_ai/jina_client.py +++ b/backend/packages/harness/deerflow/community/jina_ai/jina_client.py @@ -38,6 +38,6 @@ class JinaClient: return response.text except Exception as e: - error_message = f"Request to Jina API failed: {str(e)}" - logger.exception(error_message) + error_message = f"Request to Jina API failed: {type(e).__name__}: {e}" + logger.warning(error_message) return f"Error: {error_message}" diff --git a/backend/packages/harness/deerflow/config/app_config.py b/backend/packages/harness/deerflow/config/app_config.py index 2aa81c9f0..b31d396a5 100644 --- a/backend/packages/harness/deerflow/config/app_config.py +++ b/backend/packages/harness/deerflow/config/app_config.py @@ -11,10 +11,12 @@ from pydantic import BaseModel, ConfigDict, Field from deerflow.config.acp_config import load_acp_config_from_dict from deerflow.config.agents_api_config import AgentsApiConfig, load_agents_api_config_from_dict from deerflow.config.checkpointer_config import CheckpointerConfig, load_checkpointer_config_from_dict +from deerflow.config.database_config import DatabaseConfig from deerflow.config.extensions_config import ExtensionsConfig from deerflow.config.guardrails_config import GuardrailsConfig, load_guardrails_config_from_dict from deerflow.config.memory_config import MemoryConfig, load_memory_config_from_dict from deerflow.config.model_config import ModelConfig +from deerflow.config.run_events_config import RunEventsConfig from deerflow.config.sandbox_config import SandboxConfig from deerflow.config.skill_evolution_config import SkillEvolutionConfig from deerflow.config.skills_config import 
SkillsConfig @@ -31,6 +33,12 @@ load_dotenv() logger = logging.getLogger(__name__) +CONFIG_FILE_DATABASE_DEFAULTS = { + "backend": "sqlite", + "sqlite_dir": ".deer-flow/data", +} + + class CircuitBreakerConfig(BaseModel): """Configuration for the LLM Circuit Breaker.""" @@ -45,10 +53,34 @@ def _default_config_candidates() -> tuple[Path, ...]: return (backend_dir / "config.yaml", repo_root / "config.yaml") +def logging_level_from_config(name: str | None) -> int: + """Map ``config.yaml`` ``log_level`` string to a :mod:`logging` level constant.""" + mapping = logging.getLevelNamesMapping() + return mapping.get((name or "info").strip().upper(), logging.INFO) + + +def apply_logging_level(name: str | None) -> None: + """Resolve *name* to a logging level and apply it to the ``deerflow``/``app`` logger hierarchies. + + Only the ``deerflow`` and ``app`` logger levels are changed so that + third-party library verbosity (e.g. uvicorn, sqlalchemy) is not + affected. Root handler levels are lowered (never raised) so that + messages from the configured loggers can propagate through without + being filtered, while preserving handler thresholds that may be + intentionally restrictive for third-party log output. + """ + level = logging_level_from_config(name) + for logger_name in ("deerflow", "app"): + logging.getLogger(logger_name).setLevel(level) + for handler in logging.root.handlers: + if level < handler.level: + handler.setLevel(level) + + class AppConfig(BaseModel): """Config for the DeerFlow application""" - log_level: str = Field(default="info", description="Logging level for deerflow modules (debug/info/warning/error)") + log_level: str = Field(default="info", description="Logging level for deerflow and app modules (debug/info/warning/error); third-party libraries are not affected") token_usage: TokenUsageConfig = Field(default_factory=TokenUsageConfig, description="Token usage tracking configuration") models: list[ModelConfig] = Field(default_factory=list, description="Available models") sandbox: SandboxConfig = Field(description="Sandbox configuration") @@ -65,7 +97,9 @@ class AppConfig(BaseModel): subagents: SubagentsAppConfig = Field(default_factory=SubagentsAppConfig, description="Subagent runtime configuration") guardrails: GuardrailsConfig = Field(default_factory=GuardrailsConfig, description="Guardrail middleware configuration") circuit_breaker: CircuitBreakerConfig = Field(default_factory=CircuitBreakerConfig, description="LLM circuit breaker configuration") - model_config = ConfigDict(extra="allow", frozen=False) + model_config = ConfigDict(extra="allow") + database: DatabaseConfig = Field(default_factory=DatabaseConfig, description="Unified database backend configuration") + run_events: RunEventsConfig = Field(default_factory=RunEventsConfig, description="Run event storage configuration") checkpointer: CheckpointerConfig | None = Field(default=None, description="Checkpointer configuration") stream_bridge: StreamBridgeConfig | None = Field(default=None, description="Stream bridge configuration") @@ -114,6 +148,7 @@ class AppConfig(BaseModel): cls._check_config_version(config_data, resolved_path) config_data = cls.resolve_env_variables(config_data) + cls._apply_database_defaults(config_data) # Load title config if present if "title" in config_data: @@ -165,6 +200,18 @@ class AppConfig(BaseModel): result = cls.model_validate(config_data) return result + @classmethod + def _apply_database_defaults(cls, config_data: dict[str, Any]) -> None: + """Apply config.yaml defaults for persistence 
when the section is absent.""" + database_config = config_data.get("database") + if database_config is None: + database_config = {} + config_data["database"] = database_config + if not isinstance(database_config, dict): + return + for key, value in CONFIG_FILE_DATABASE_DEFAULTS.items(): + database_config.setdefault(key, value) + @classmethod def _check_config_version(cls, config_data: dict, config_path: Path) -> None: """Check if the user's config.yaml is outdated compared to config.example.yaml. @@ -269,6 +316,9 @@ class AppConfig(BaseModel): return next((group for group in self.tool_groups if group.name == name), None) +# Compatibility singleton layer for code paths that have not yet been +# migrated to explicit ``AppConfig`` threading. New composition roots should +# prefer constructing ``AppConfig`` once and passing it down directly. _app_config: AppConfig | None = None _app_config_path: Path | None = None _app_config_mtime: float | None = None diff --git a/backend/packages/harness/deerflow/config/database_config.py b/backend/packages/harness/deerflow/config/database_config.py new file mode 100644 index 000000000..37cfd579d --- /dev/null +++ b/backend/packages/harness/deerflow/config/database_config.py @@ -0,0 +1,102 @@ +"""Unified database backend configuration. + +Controls BOTH the LangGraph checkpointer and the DeerFlow application +persistence layer (runs, threads metadata, users, etc.). The user +configures one backend; the system handles physical separation details. + +SQLite mode: checkpointer and app share a single .db file +({sqlite_dir}/deerflow.db) with WAL journal mode enabled on every +connection. WAL allows concurrent readers and a single writer without +blocking, making a unified file safe for both workloads. Writers +that contend for the lock wait via the default 5-second sqlite3 +busy timeout rather than failing immediately. + +Postgres mode: both use the same database URL but maintain independent +connection pools with different lifecycles. + +Memory mode: checkpointer uses MemorySaver, app uses in-memory stores. +No database is initialized. + +Sensitive values (postgres_url) should use $VAR syntax in config.yaml +to reference environment variables from .env: + + database: + backend: postgres + postgres_url: $DATABASE_URL + +The $VAR resolution is handled by AppConfig.resolve_env_variables() +before this config is instantiated -- DatabaseConfig itself does not +need to do any environment variable processing. +""" + +from __future__ import annotations + +import os +from typing import Literal + +from pydantic import BaseModel, Field + + +class DatabaseConfig(BaseModel): + backend: Literal["memory", "sqlite", "postgres"] = Field( + default="memory", + description=("Storage backend for both checkpointer and application data. 'memory' for development (no persistence across restarts), 'sqlite' for single-node deployment, 'postgres' for production multi-node deployment."), + ) + sqlite_dir: str = Field( + default=".deer-flow/data", + description=("Directory for the SQLite database file. Both checkpointer and application data share {sqlite_dir}/deerflow.db."), + ) + postgres_url: str = Field( + default="", + description=( + "PostgreSQL connection URL, shared by checkpointer and app. " + "Use $DATABASE_URL in config.yaml to reference .env. " + "Example: postgresql://user:pass@host:5432/deerflow " + "(the +asyncpg driver suffix is added automatically where needed)." 
+ ), + ) + echo_sql: bool = Field( + default=False, + description="Echo all SQL statements to log (debug only).", + ) + pool_size: int = Field( + default=5, + description="Connection pool size for the app ORM engine (postgres only).", + ) + + # -- Derived helpers (not user-configured) -- + + @property + def _resolved_sqlite_dir(self) -> str: + """Resolve sqlite_dir to an absolute path (relative to CWD).""" + from pathlib import Path + + return str(Path(self.sqlite_dir).resolve()) + + @property + def sqlite_path(self) -> str: + """Unified SQLite file path shared by checkpointer and app.""" + return os.path.join(self._resolved_sqlite_dir, "deerflow.db") + + # Backward-compatible aliases + @property + def checkpointer_sqlite_path(self) -> str: + """SQLite file path for the LangGraph checkpointer (alias for sqlite_path).""" + return self.sqlite_path + + @property + def app_sqlite_path(self) -> str: + """SQLite file path for application ORM data (alias for sqlite_path).""" + return self.sqlite_path + + @property + def app_sqlalchemy_url(self) -> str: + """SQLAlchemy async URL for the application ORM engine.""" + if self.backend == "sqlite": + return f"sqlite+aiosqlite:///{self.sqlite_path}" + if self.backend == "postgres": + url = self.postgres_url + if url.startswith("postgresql://"): + url = url.replace("postgresql://", "postgresql+asyncpg://", 1) + return url + raise ValueError(f"No SQLAlchemy URL for backend={self.backend!r}") diff --git a/backend/packages/harness/deerflow/config/memory_config.py b/backend/packages/harness/deerflow/config/memory_config.py index 8565aa216..f9153262f 100644 --- a/backend/packages/harness/deerflow/config/memory_config.py +++ b/backend/packages/harness/deerflow/config/memory_config.py @@ -14,8 +14,9 @@ class MemoryConfig(BaseModel): default="", description=( "Path to store memory data. " - "If empty, defaults to `{base_dir}/memory.json` (see Paths.memory_file). " - "Absolute paths are used as-is. " + "If empty, defaults to per-user memory at `{base_dir}/users/{user_id}/memory.json`. " + "Absolute paths are used as-is and opt out of per-user isolation " + "(all users share the same file). " "Relative paths are resolved against `Paths.base_dir` " "(not the backend working directory). " "Note: if you previously set this to `.deer-flow/memory.json`, " diff --git a/backend/packages/harness/deerflow/config/paths.py b/backend/packages/harness/deerflow/config/paths.py index 2d5661e63..f1ce7eae1 100644 --- a/backend/packages/harness/deerflow/config/paths.py +++ b/backend/packages/harness/deerflow/config/paths.py @@ -7,6 +7,7 @@ from pathlib import Path, PureWindowsPath VIRTUAL_PATH_PREFIX = "/mnt/user-data" _SAFE_THREAD_ID_RE = re.compile(r"^[A-Za-z0-9_\-]+$") +_SAFE_USER_ID_RE = re.compile(r"^[A-Za-z0-9_\-]+$") def _default_local_base_dir() -> Path: @@ -22,6 +23,13 @@ def _validate_thread_id(thread_id: str) -> str: return thread_id +def _validate_user_id(user_id: str) -> str: + """Validate a user ID before using it in filesystem paths.""" + if not _SAFE_USER_ID_RE.match(user_id): + raise ValueError(f"Invalid user_id {user_id!r}: only alphanumeric characters, hyphens, and underscores are allowed.") + return user_id + + def _join_host_path(base: str, *parts: str) -> str: """Join host filesystem path segments while preserving native style. 
@@ -134,44 +142,63 @@ class Paths: """Per-agent memory file: `{base_dir}/agents/{name}/memory.json`.""" return self.agent_dir(name) / "memory.json" - def thread_dir(self, thread_id: str) -> Path: + def user_dir(self, user_id: str) -> Path: + """Directory for a specific user: `{base_dir}/users/{user_id}/`.""" + return self.base_dir / "users" / _validate_user_id(user_id) + + def user_memory_file(self, user_id: str) -> Path: + """Per-user memory file: `{base_dir}/users/{user_id}/memory.json`.""" + return self.user_dir(user_id) / "memory.json" + + def user_agent_memory_file(self, user_id: str, agent_name: str) -> Path: + """Per-user per-agent memory: `{base_dir}/users/{user_id}/agents/{name}/memory.json`.""" + return self.user_dir(user_id) / "agents" / agent_name.lower() / "memory.json" + + def thread_dir(self, thread_id: str, *, user_id: str | None = None) -> Path: """ - Host path for a thread's data: `{base_dir}/threads/{thread_id}/` + Host path for a thread's data. + + When *user_id* is provided: + `{base_dir}/users/{user_id}/threads/{thread_id}/` + Otherwise (legacy layout): + `{base_dir}/threads/{thread_id}/` This directory contains a `user-data/` subdirectory that is mounted as `/mnt/user-data/` inside the sandbox. Raises: - ValueError: If `thread_id` contains unsafe characters (path separators - or `..`) that could cause directory traversal. + ValueError: If `thread_id` or `user_id` contains unsafe characters (path + separators or `..`) that could cause directory traversal. """ + if user_id is not None: + return self.user_dir(user_id) / "threads" / _validate_thread_id(thread_id) return self.base_dir / "threads" / _validate_thread_id(thread_id) - def sandbox_work_dir(self, thread_id: str) -> Path: + def sandbox_work_dir(self, thread_id: str, *, user_id: str | None = None) -> Path: """ Host path for the agent's workspace directory. Host: `{base_dir}/threads/{thread_id}/user-data/workspace/` Sandbox: `/mnt/user-data/workspace/` """ - return self.thread_dir(thread_id) / "user-data" / "workspace" + return self.thread_dir(thread_id, user_id=user_id) / "user-data" / "workspace" - def sandbox_uploads_dir(self, thread_id: str) -> Path: + def sandbox_uploads_dir(self, thread_id: str, *, user_id: str | None = None) -> Path: """ Host path for user-uploaded files. Host: `{base_dir}/threads/{thread_id}/user-data/uploads/` Sandbox: `/mnt/user-data/uploads/` """ - return self.thread_dir(thread_id) / "user-data" / "uploads" + return self.thread_dir(thread_id, user_id=user_id) / "user-data" / "uploads" - def sandbox_outputs_dir(self, thread_id: str) -> Path: + def sandbox_outputs_dir(self, thread_id: str, *, user_id: str | None = None) -> Path: """ Host path for agent-generated artifacts. Host: `{base_dir}/threads/{thread_id}/user-data/outputs/` Sandbox: `/mnt/user-data/outputs/` """ - return self.thread_dir(thread_id) / "user-data" / "outputs" + return self.thread_dir(thread_id, user_id=user_id) / "user-data" / "outputs" - def acp_workspace_dir(self, thread_id: str) -> Path: + def acp_workspace_dir(self, thread_id: str, *, user_id: str | None = None) -> Path: """ Host path for the ACP workspace of a specific thread. Host: `{base_dir}/threads/{thread_id}/acp-workspace/` @@ -180,41 +207,43 @@ class Paths: Each thread gets its own isolated ACP workspace so that concurrent sessions cannot read each other's ACP agent outputs. 
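 
         Example host layout (illustrative, per-user mode):
             {base_dir}/users/u-123/threads/t-456/acp-workspace/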
""" - return self.thread_dir(thread_id) / "acp-workspace" + return self.thread_dir(thread_id, user_id=user_id) / "acp-workspace" - def sandbox_user_data_dir(self, thread_id: str) -> Path: + def sandbox_user_data_dir(self, thread_id: str, *, user_id: str | None = None) -> Path: """ Host path for the user-data root. Host: `{base_dir}/threads/{thread_id}/user-data/` Sandbox: `/mnt/user-data/` """ - return self.thread_dir(thread_id) / "user-data" + return self.thread_dir(thread_id, user_id=user_id) / "user-data" - def host_thread_dir(self, thread_id: str) -> str: + def host_thread_dir(self, thread_id: str, *, user_id: str | None = None) -> str: """Host path for a thread directory, preserving Windows path syntax.""" + if user_id is not None: + return _join_host_path(self._host_base_dir_str(), "users", _validate_user_id(user_id), "threads", _validate_thread_id(thread_id)) return _join_host_path(self._host_base_dir_str(), "threads", _validate_thread_id(thread_id)) - def host_sandbox_user_data_dir(self, thread_id: str) -> str: + def host_sandbox_user_data_dir(self, thread_id: str, *, user_id: str | None = None) -> str: """Host path for a thread's user-data root.""" - return _join_host_path(self.host_thread_dir(thread_id), "user-data") + return _join_host_path(self.host_thread_dir(thread_id, user_id=user_id), "user-data") - def host_sandbox_work_dir(self, thread_id: str) -> str: + def host_sandbox_work_dir(self, thread_id: str, *, user_id: str | None = None) -> str: """Host path for the workspace mount source.""" - return _join_host_path(self.host_sandbox_user_data_dir(thread_id), "workspace") + return _join_host_path(self.host_sandbox_user_data_dir(thread_id, user_id=user_id), "workspace") - def host_sandbox_uploads_dir(self, thread_id: str) -> str: + def host_sandbox_uploads_dir(self, thread_id: str, *, user_id: str | None = None) -> str: """Host path for the uploads mount source.""" - return _join_host_path(self.host_sandbox_user_data_dir(thread_id), "uploads") + return _join_host_path(self.host_sandbox_user_data_dir(thread_id, user_id=user_id), "uploads") - def host_sandbox_outputs_dir(self, thread_id: str) -> str: + def host_sandbox_outputs_dir(self, thread_id: str, *, user_id: str | None = None) -> str: """Host path for the outputs mount source.""" - return _join_host_path(self.host_sandbox_user_data_dir(thread_id), "outputs") + return _join_host_path(self.host_sandbox_user_data_dir(thread_id, user_id=user_id), "outputs") - def host_acp_workspace_dir(self, thread_id: str) -> str: + def host_acp_workspace_dir(self, thread_id: str, *, user_id: str | None = None) -> str: """Host path for the ACP workspace mount source.""" - return _join_host_path(self.host_thread_dir(thread_id), "acp-workspace") + return _join_host_path(self.host_thread_dir(thread_id, user_id=user_id), "acp-workspace") - def ensure_thread_dirs(self, thread_id: str) -> None: + def ensure_thread_dirs(self, thread_id: str, *, user_id: str | None = None) -> None: """Create all standard sandbox directories for a thread. Directories are created with mode 0o777 so that sandbox containers @@ -228,24 +257,24 @@ class Paths: ACP agent invocation. 
""" for d in [ - self.sandbox_work_dir(thread_id), - self.sandbox_uploads_dir(thread_id), - self.sandbox_outputs_dir(thread_id), - self.acp_workspace_dir(thread_id), + self.sandbox_work_dir(thread_id, user_id=user_id), + self.sandbox_uploads_dir(thread_id, user_id=user_id), + self.sandbox_outputs_dir(thread_id, user_id=user_id), + self.acp_workspace_dir(thread_id, user_id=user_id), ]: d.mkdir(parents=True, exist_ok=True) d.chmod(0o777) - def delete_thread_dir(self, thread_id: str) -> None: + def delete_thread_dir(self, thread_id: str, *, user_id: str | None = None) -> None: """Delete all persisted data for a thread. The operation is idempotent: missing thread directories are ignored. """ - thread_dir = self.thread_dir(thread_id) + thread_dir = self.thread_dir(thread_id, user_id=user_id) if thread_dir.exists(): shutil.rmtree(thread_dir) - def resolve_virtual_path(self, thread_id: str, virtual_path: str) -> Path: + def resolve_virtual_path(self, thread_id: str, virtual_path: str, *, user_id: str | None = None) -> Path: """Resolve a sandbox virtual path to the actual host filesystem path. Args: @@ -253,6 +282,7 @@ class Paths: virtual_path: Virtual path as seen inside the sandbox, e.g. ``/mnt/user-data/outputs/report.pdf``. Leading slashes are stripped before matching. + user_id: Optional user ID for user-scoped path resolution. Returns: The resolved absolute host filesystem path. @@ -270,7 +300,7 @@ class Paths: raise ValueError(f"Path must start with /{prefix}") relative = stripped[len(prefix) :].lstrip("/") - base = self.sandbox_user_data_dir(thread_id).resolve() + base = self.sandbox_user_data_dir(thread_id, user_id=user_id).resolve() actual = (base / relative).resolve() try: diff --git a/backend/packages/harness/deerflow/config/run_events_config.py b/backend/packages/harness/deerflow/config/run_events_config.py new file mode 100644 index 000000000..cddd9061f --- /dev/null +++ b/backend/packages/harness/deerflow/config/run_events_config.py @@ -0,0 +1,33 @@ +"""Run event storage configuration. + +Controls where run events (messages + execution traces) are persisted. + +Backends: +- memory: In-memory storage, data lost on restart. Suitable for + development and testing. +- db: SQL database via SQLAlchemy ORM. Provides full query capability. + Suitable for production deployments. +- jsonl: Append-only JSONL files. Lightweight alternative for + single-node deployments that need persistence without a database. +""" + +from __future__ import annotations + +from typing import Literal + +from pydantic import BaseModel, Field + + +class RunEventsConfig(BaseModel): + backend: Literal["memory", "db", "jsonl"] = Field( + default="memory", + description="Storage backend for run events. 
'memory' for development (no persistence), 'db' for production (SQL queries), 'jsonl' for lightweight single-node persistence.", + ) + max_trace_content: int = Field( + default=10240, + description="Maximum trace content size in bytes before truncation (db backend only).", + ) + track_token_usage: bool = Field( + default=True, + description="Whether RunJournal should accumulate token counts to RunRow.", + ) diff --git a/backend/packages/harness/deerflow/config/skills_config.py b/backend/packages/harness/deerflow/config/skills_config.py index 31a6ca902..266a98b91 100644 --- a/backend/packages/harness/deerflow/config/skills_config.py +++ b/backend/packages/harness/deerflow/config/skills_config.py @@ -11,6 +11,10 @@ def _default_repo_root() -> Path: class SkillsConfig(BaseModel): """Configuration for skills system""" + use: str = Field( + default="deerflow.skills.storage.local_skill_storage:LocalSkillStorage", + description="Class path of the SkillStorage implementation.", + ) path: str | None = Field( default=None, description="Path to skills directory. If not specified, defaults to ../skills relative to backend directory", @@ -35,10 +39,8 @@ class SkillsConfig(BaseModel): path = _default_repo_root() / path return path.resolve() else: - # Default: ../skills relative to backend directory - from deerflow.skills.loader import get_skills_root_path - - return get_skills_root_path() + # Default: /skills + return _default_repo_root() / "skills" def get_skill_container_path(self, skill_name: str, category: str = "public") -> str: """ diff --git a/backend/packages/harness/deerflow/config/subagents_config.py b/backend/packages/harness/deerflow/config/subagents_config.py index b5f885d5a..e7219284d 100644 --- a/backend/packages/harness/deerflow/config/subagents_config.py +++ b/backend/packages/harness/deerflow/config/subagents_config.py @@ -25,6 +25,47 @@ class SubagentOverrideConfig(BaseModel): min_length=1, description="Model name for this subagent (None = inherit from parent agent)", ) + skills: list[str] | None = Field( + default=None, + description="Skill names whitelist for this subagent (None = inherit all enabled skills, [] = no skills)", + ) + + +class CustomSubagentConfig(BaseModel): + """User-defined subagent type declared in config.yaml.""" + + description: str = Field( + description="When the lead agent should delegate to this subagent", + ) + system_prompt: str = Field( + description="System prompt that guides the subagent's behavior", + ) + tools: list[str] | None = Field( + default=None, + description="Tool names whitelist (None = inherit all tools from parent)", + ) + disallowed_tools: list[str] | None = Field( + default_factory=lambda: ["task", "ask_clarification", "present_files"], + description="Tool names to deny", + ) + skills: list[str] | None = Field( + default=None, + description="Skill names whitelist (None = inherit all enabled skills, [] = no skills)", + ) + model: str = Field( + default="inherit", + description="Model to use - 'inherit' uses parent's model", + ) + max_turns: int = Field( + default=50, + ge=1, + description="Maximum number of agent turns before stopping", + ) + timeout_seconds: int = Field( + default=900, + ge=1, + description="Maximum execution time in seconds", + ) class SubagentsAppConfig(BaseModel): @@ -44,6 +85,10 @@ class SubagentsAppConfig(BaseModel): default_factory=dict, description="Per-agent configuration overrides keyed by agent name", ) + custom_agents: dict[str, CustomSubagentConfig] = Field( + default_factory=dict, + description="User-defined 
subagent types keyed by agent name", + ) def get_timeout_for(self, agent_name: str) -> int: """Get the effective timeout for a specific agent. @@ -82,6 +127,20 @@ class SubagentsAppConfig(BaseModel): return self.max_turns return builtin_default + def get_skills_for(self, agent_name: str) -> list[str] | None: + """Get the skills override for a specific agent. + + Args: + agent_name: The name of the subagent. + + Returns: + Skill names whitelist if overridden, None otherwise (subagent will inherit all enabled skills). + """ + override = self.agents.get(agent_name) + if override is not None and override.skills is not None: + return override.skills + return None + _subagents_config: SubagentsAppConfig = SubagentsAppConfig() @@ -105,15 +164,20 @@ def load_subagents_config_from_dict(config_dict: dict) -> None: parts.append(f"max_turns={override.max_turns}") if override.model is not None: parts.append(f"model={override.model}") + if override.skills is not None: + parts.append(f"skills={override.skills}") if parts: overrides_summary[name] = ", ".join(parts) - if overrides_summary: + custom_agents_names = list(_subagents_config.custom_agents.keys()) + + if overrides_summary or custom_agents_names: logger.info( - "Subagents config loaded: default timeout=%ss, default max_turns=%s, per-agent overrides=%s", + "Subagents config loaded: default timeout=%ss, default max_turns=%s, per-agent overrides=%s, custom_agents=%s", _subagents_config.timeout_seconds, _subagents_config.max_turns, - overrides_summary, + overrides_summary or "none", + custom_agents_names or "none", ) else: logger.info( diff --git a/backend/packages/harness/deerflow/config/summarization_config.py b/backend/packages/harness/deerflow/config/summarization_config.py index f132e58cd..fab268ec5 100644 --- a/backend/packages/harness/deerflow/config/summarization_config.py +++ b/backend/packages/harness/deerflow/config/summarization_config.py @@ -51,6 +51,25 @@ class SummarizationConfig(BaseModel): default=None, description="Custom prompt template for generating summaries. If not provided, uses the default LangChain prompt.", ) + preserve_recent_skill_count: int = Field( + default=5, + ge=0, + description="Number of most-recently-loaded skill files to exclude from summarization. Set to 0 to disable skill preservation.", + ) + preserve_recent_skill_tokens: int = Field( + default=25000, + ge=0, + description="Total token budget reserved for recently-loaded skill files that must be preserved across summarization.", + ) + preserve_recent_skill_tokens_per_skill: int = Field( + default=5000, + ge=0, + description="Per-skill token cap when preserving skill files across summarization. 
Skill reads above this size are not rescued.", + ) + skill_file_read_tool_names: list[str] = Field( + default_factory=lambda: ["read_file", "read", "view", "cat"], + description="Tool names treated as skill file reads when preserving recently-loaded skills across summarization.", + ) # Global configuration instance diff --git a/backend/packages/harness/deerflow/mcp/tools.py b/backend/packages/harness/deerflow/mcp/tools.py index 718ac2ba3..bcd50c645 100644 --- a/backend/packages/harness/deerflow/mcp/tools.py +++ b/backend/packages/harness/deerflow/mcp/tools.py @@ -12,6 +12,7 @@ from langchain_core.tools import BaseTool from deerflow.config.extensions_config import ExtensionsConfig from deerflow.mcp.client import build_servers_config from deerflow.mcp.oauth import build_oauth_tool_interceptor, get_initial_oauth_headers +from deerflow.reflection import resolve_variable logger = logging.getLogger(__name__) @@ -95,6 +96,27 @@ async def get_mcp_tools() -> list[BaseTool]: if oauth_interceptor is not None: tool_interceptors.append(oauth_interceptor) + # Load custom interceptors declared in extensions_config.json + # Format: "mcpInterceptors": ["pkg.module:builder_func", ...] + raw_interceptor_paths = (extensions_config.model_extra or {}).get("mcpInterceptors") + if isinstance(raw_interceptor_paths, str): + raw_interceptor_paths = [raw_interceptor_paths] + elif not isinstance(raw_interceptor_paths, list): + if raw_interceptor_paths is not None: + logger.warning(f"mcpInterceptors must be a list of strings, got {type(raw_interceptor_paths).__name__}; skipping") + raw_interceptor_paths = [] + for interceptor_path in raw_interceptor_paths: + try: + builder = resolve_variable(interceptor_path) + interceptor = builder() + if callable(interceptor): + tool_interceptors.append(interceptor) + logger.info(f"Loaded MCP interceptor: {interceptor_path}") + elif interceptor is not None: + logger.warning(f"Builder {interceptor_path} returned non-callable {type(interceptor).__name__}; skipping") + except Exception as e: + logger.warning(f"Failed to load MCP interceptor {interceptor_path}: {e}", exc_info=True) + client = MultiServerMCPClient(servers_config, tool_interceptors=tool_interceptors, tool_name_prefix=True) # Get all tools from all servers diff --git a/backend/packages/harness/deerflow/models/claude_provider.py b/backend/packages/harness/deerflow/models/claude_provider.py index 2c0050313..35a15494d 100644 --- a/backend/packages/harness/deerflow/models/claude_provider.py +++ b/backend/packages/harness/deerflow/models/claude_provider.py @@ -190,23 +190,33 @@ class ClaudeChatModel(ChatAnthropic): ) def _apply_prompt_caching(self, payload: dict) -> None: - """Apply ephemeral cache_control to system and recent messages.""" - # Cache system messages + """Apply ephemeral cache_control to system, recent messages, and last tool definition. + + Uses a budget of MAX_CACHE_BREAKPOINTS (4) breakpoints — the hard limit + enforced by both the Anthropic API and AWS Bedrock. Breakpoints are + placed on the *last* eligible blocks because later breakpoints cover a + larger prefix and yield better cache hit rates. + """ + MAX_CACHE_BREAKPOINTS = 4 + + # Collect candidate blocks in document order: + # 1. system text blocks + # 2. content blocks of the last prompt_cache_size messages + # 3. the last tool definition + candidates: list[dict] = [] + + # 1. 
System blocks system = payload.get("system") if system and isinstance(system, list): for block in system: if isinstance(block, dict) and block.get("type") == "text": - block["cache_control"] = {"type": "ephemeral"} + candidates.append(block) elif system and isinstance(system, str): - payload["system"] = [ - { - "type": "text", - "text": system, - "cache_control": {"type": "ephemeral"}, - } - ] + new_block: dict = {"type": "text", "text": system} + payload["system"] = [new_block] + candidates.append(new_block) - # Cache recent messages + # 2. Recent message blocks messages = payload.get("messages", []) cache_start = max(0, len(messages) - self.prompt_cache_size) for i in range(cache_start, len(messages)): @@ -217,20 +227,21 @@ class ClaudeChatModel(ChatAnthropic): if isinstance(content, list): for block in content: if isinstance(block, dict): - block["cache_control"] = {"type": "ephemeral"} + candidates.append(block) elif isinstance(content, str) and content: - msg["content"] = [ - { - "type": "text", - "text": content, - "cache_control": {"type": "ephemeral"}, - } - ] + new_block = {"type": "text", "text": content} + msg["content"] = [new_block] + candidates.append(new_block) - # Cache the last tool definition + # 3. Last tool definition tools = payload.get("tools", []) if tools and isinstance(tools[-1], dict): - tools[-1]["cache_control"] = {"type": "ephemeral"} + candidates.append(tools[-1]) + + # Apply cache_control only to the last MAX_CACHE_BREAKPOINTS candidates + # to stay within the API limit. + for block in candidates[-MAX_CACHE_BREAKPOINTS:]: + block["cache_control"] = {"type": "ephemeral"} def _apply_thinking_budget(self, payload: dict) -> None: """Auto-allocate thinking budget (80% of max_tokens).""" diff --git a/backend/packages/harness/deerflow/models/factory.py b/backend/packages/harness/deerflow/models/factory.py index bd2828e94..518bdc9f1 100644 --- a/backend/packages/harness/deerflow/models/factory.py +++ b/backend/packages/harness/deerflow/models/factory.py @@ -3,6 +3,7 @@ import logging from langchain.chat_models import BaseChatModel from deerflow.config import get_app_config +from deerflow.config.app_config import AppConfig from deerflow.reflection import resolve_class from deerflow.tracing import build_tracing_callbacks @@ -46,7 +47,7 @@ def _enable_stream_usage_by_default(model_use_path: str, model_settings_from_con model_settings_from_config["stream_usage"] = True -def create_chat_model(name: str | None = None, thinking_enabled: bool = False, **kwargs) -> BaseChatModel: +def create_chat_model(name: str | None = None, thinking_enabled: bool = False, *, app_config: AppConfig | None = None, **kwargs) -> BaseChatModel: """Create a chat model instance from the config. Args: @@ -55,7 +56,7 @@ def create_chat_model(name: str | None = None, thinking_enabled: bool = False, * Returns: A chat model instance. """ - config = get_app_config() + config = app_config or get_app_config() if name is None: name = config.models[0].name model_config = config.get_model_config(name) @@ -131,7 +132,22 @@ def create_chat_model(name: str | None = None, thinking_enabled: bool = False, * elif "reasoning_effort" not in model_settings_from_config: model_settings_from_config["reasoning_effort"] = "medium" - model_instance = model_class(**{**model_settings_from_config, **kwargs}) + # For MindIE models: enforce conservative retry defaults. + # Timeout normalization is handled inside MindIEChatModel itself. 
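+    # Illustrative config.yaml model entry that routes here (the "use" class
+    # path format is an assumption; adjust to the actual provider module):
+    #   models:
+    #     - name: mindie-qwen
+    #       use: deerflow.models.mindie_provider:MindIEChatModel
+    #       read_timeout: 600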
+    if getattr(model_class, "__name__", "") == "MindIEChatModel":
+        # Enforce max_retries constraint to prevent cascading timeouts.
+        model_settings_from_config["max_retries"] = model_settings_from_config.get("max_retries", 1)
+
+    # Ensure stream_usage is enabled so that token usage metadata is available
+    # in streaming responses. LangChain's BaseChatOpenAI only defaults
+    # stream_usage=True when no custom base_url/api_base is set, so models
+    # hitting third-party endpoints (e.g. doubao, deepseek) silently lose
+    # usage data. We default it to True unless explicitly configured.
+    if "stream_usage" not in model_settings_from_config and "stream_usage" not in kwargs:
+        if "stream_usage" in getattr(model_class, "model_fields", {}):
+            model_settings_from_config["stream_usage"] = True
+
+    model_instance = model_class(**{**kwargs, **model_settings_from_config})
 
     callbacks = build_tracing_callbacks()
     if callbacks:
diff --git a/backend/packages/harness/deerflow/models/mindie_provider.py b/backend/packages/harness/deerflow/models/mindie_provider.py
new file mode 100644
index 000000000..a75ae0aba
--- /dev/null
+++ b/backend/packages/harness/deerflow/models/mindie_provider.py
@@ -0,0 +1,249 @@
+import ast
+import html
+import json
+import re
+import uuid
+from collections.abc import Iterator
+
+import httpx
+from langchain_core.messages import AIMessage, AIMessageChunk, HumanMessage, ToolMessage
+from langchain_core.outputs import ChatGenerationChunk, ChatResult
+from langchain_openai import ChatOpenAI
+
+
+def _fix_messages(messages: list) -> list:
+    """Sanitize incoming messages for MindIE compatibility.
+
+    MindIE's chat template may fail to parse LangChain's native tool_calls
+    or ToolMessage roles, resulting in 0-token generation errors. This function
+    flattens multi-modal list contents into strings and converts tool-related
+    messages into raw text with XML tags expected by the underlying model.
+    """
+    fixed = []
+    for msg in messages:
+        # Flatten content if it's a list of blocks
+        if isinstance(msg.content, list):
+            parts = []
+            for block in msg.content:
+                if isinstance(block, str):
+                    parts.append(block)
+                elif isinstance(block, dict) and block.get("type") == "text":
+                    parts.append(block.get("text", ""))
+            text = "".join(parts)
+        else:
+            text = msg.content or ""
+
+        # Convert AIMessage with tool_calls to raw XML text format
+        if isinstance(msg, AIMessage) and getattr(msg, "tool_calls", []):
+            xml_parts = []
+            for tool in msg.tool_calls:
+                args_xml = " ".join(f"<parameter={k}>{html.escape(v if isinstance(v, str) else json.dumps(v, ensure_ascii=False), quote=False)}</parameter>" for k, v in tool.get("args", {}).items())
+                xml_parts.append(f"<tool_call><function={tool['name']}> {args_xml} </function></tool_call>")
+            full_text = f"{text}\n" + "\n".join(xml_parts) if text else "\n".join(xml_parts)
+            fixed.append(AIMessage(content=full_text.strip() or " "))
+            continue
+
+        # Wrap tool execution results in XML tags and convert to HumanMessage
+        if isinstance(msg, ToolMessage):
+            tool_result_text = f"<tool_result>\n{text}\n</tool_result>"
+            fixed.append(HumanMessage(content=tool_result_text))
+            continue
+
+        # Fallback to prevent completely empty message content
+        if not text.strip():
+            text = " "
+
+        fixed.append(msg.model_copy(update={"content": text}))
+
+    return fixed
+
+
+def _parse_xml_tool_call_to_dict(content: str) -> tuple[str, list[dict]]:
+    """Parse XML-style tool calls from model output into LangChain dicts.
+
+    Args:
+        content: The raw text output from the model.
+
+    Returns:
+        A tuple containing the cleaned text (with XML blocks removed) and
+        a list of tool call dictionaries formatted for LangChain.
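+
+    Example (illustrative; generated call ids are random):
+        >>> text, calls = _parse_xml_tool_call_to_dict(
+        ...     "<tool_call><function=search>"
+        ...     "<parameter=query>cats</parameter></function></tool_call>")
+        >>> text, calls[0]["name"], calls[0]["args"]
+        ('', 'search', {'query': 'cats'})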
+ """ + if not isinstance(content, str) or "" not in content: + return content, [] + + tool_calls = [] + clean_parts: list[str] = [] + cursor = 0 + for start, end, inner_content in _iter_tool_call_blocks(content): + clean_parts.append(content[cursor:start]) + cursor = end + + func_match = re.search(r"]+)>", inner_content) + if not func_match: + continue + function_name = html.unescape(func_match.group(1).strip()) + + # Ignore nested tool blocks when extracting parameters for this call. + # Nested `` sections represent separate invocations and + # their `` tags must not leak into the current call args. + param_source_parts: list[str] = [] + nested_cursor = 0 + for nested_start, nested_end, _ in _iter_tool_call_blocks(inner_content): + param_source_parts.append(inner_content[nested_cursor:nested_start]) + nested_cursor = nested_end + param_source_parts.append(inner_content[nested_cursor:]) + param_source = "".join(param_source_parts) + + args = {} + param_pattern = re.compile(r"]+)>(.*?)", re.DOTALL) + for param_match in param_pattern.finditer(param_source): + key = html.unescape(param_match.group(1).strip()) + raw_value = html.unescape(param_match.group(2).strip()) + + # Attempt to deserialize string values into native Python types + # to satisfy downstream Pydantic validation. + parsed_value = raw_value + if raw_value.startswith(("[", "{")) or raw_value in ("true", "false", "null") or raw_value.isdigit(): + try: + parsed_value = json.loads(raw_value) + except json.JSONDecodeError: + try: + parsed_value = ast.literal_eval(raw_value) + except (ValueError, SyntaxError): + pass + + args[key] = parsed_value + + tool_calls.append({"name": function_name, "args": args, "id": f"call_{uuid.uuid4().hex[:10]}"}) + clean_parts.append(content[cursor:]) + + return "".join(clean_parts).strip(), tool_calls + + +def _iter_tool_call_blocks(content: str) -> Iterator[tuple[int, int, str]]: + """Iterate `...` blocks and tolerate nesting.""" + token_pattern = re.compile(r"") + depth = 0 + block_start = -1 + + for match in token_pattern.finditer(content): + token = match.group(0) + if token == "": + if depth == 0: + block_start = match.start() + depth += 1 + continue + + if depth == 0: + continue + + depth -= 1 + if depth == 0 and block_start != -1: + block_end = match.end() + inner_start = block_start + len("") + inner_end = match.start() + yield block_start, block_end, content[inner_start:inner_end] + block_start = -1 + + +def _decode_escaped_newlines_outside_fences(content: str) -> str: + """Decode literal `\\n` outside fenced code blocks.""" + if "\\n" not in content: + return content + + parts = re.split(r"(```[\s\S]*?```)", content) + for idx, part in enumerate(parts): + if part.startswith("```"): + continue + parts[idx] = part.replace("\\n", "\n") + return "".join(parts) + + +class MindIEChatModel(ChatOpenAI): + """Chat model adapter for MindIE engine. + + Addresses compatibility issues including: + - Flattening multimodal list contents to strings. + - Intercepting and parsing hardcoded XML tool calls into LangChain standard. + - Handling stream=True dropping choices when tools are present by falling back + to non-streaming generation and yielding simulated chunks. + - Fixing over-escaped newline characters from gateway responses. 
+ """ + + def __init__(self, **kwargs): + """Normalize timeout kwargs without creating long-lived clients.""" + connect_timeout = kwargs.pop("connect_timeout", 30.0) + read_timeout = kwargs.pop("read_timeout", 900.0) + write_timeout = kwargs.pop("write_timeout", 60.0) + pool_timeout = kwargs.pop("pool_timeout", 30.0) + + kwargs.setdefault( + "timeout", + httpx.Timeout( + connect=connect_timeout, + read=read_timeout, + write=write_timeout, + pool=pool_timeout, + ), + ) + super().__init__(**kwargs) + + def _patch_result_with_tools(self, result: ChatResult) -> ChatResult: + """Apply post-generation fixes to the model result.""" + for gen in result.generations: + msg = gen.message + + if isinstance(msg.content, str): + # Keep escaped newlines inside fenced code blocks untouched. + msg.content = _decode_escaped_newlines_outside_fences(msg.content) + + if "" in msg.content: + clean_content, extracted_tools = _parse_xml_tool_call_to_dict(msg.content) + + if extracted_tools: + msg.content = clean_content + if getattr(msg, "tool_calls", None) is None: + msg.tool_calls = [] + msg.tool_calls.extend(extracted_tools) + return result + + def _generate(self, messages, stop=None, run_manager=None, **kwargs): + result = super()._generate(_fix_messages(messages), stop=stop, run_manager=run_manager, **kwargs) + return self._patch_result_with_tools(result) + + async def _agenerate(self, messages, stop=None, run_manager=None, **kwargs): + result = await super()._agenerate(_fix_messages(messages), stop=stop, run_manager=run_manager, **kwargs) + return self._patch_result_with_tools(result) + + async def _astream(self, messages, stop=None, run_manager=None, **kwargs): + # Route standard queries to native streaming for lower TTFB + if not kwargs.get("tools"): + async for chunk in super()._astream(_fix_messages(messages), stop=stop, run_manager=run_manager, **kwargs): + if isinstance(chunk.message.content, str): + chunk.message.content = _decode_escaped_newlines_outside_fences(chunk.message.content) + yield chunk + return + + # Fallback for tool-enabled requests: + # MindIE currently drops choices when stream=True and tools are present. + # We await the full generation and yield chunks to simulate streaming. 
+ result = await self._agenerate(messages, stop=stop, run_manager=run_manager, **kwargs) + + for gen in result.generations: + msg = gen.message + content = msg.content + standard_tool_calls = getattr(msg, "tool_calls", []) + + # Yield text in chunks to allow downstream UI/Markdown parsers to render smoothly + if isinstance(content, str) and content: + chunk_size = 15 + for i in range(0, len(content), chunk_size): + chunk_text = content[i : i + chunk_size] + chunk_msg = AIMessageChunk(content=chunk_text, id=msg.id, response_metadata=msg.response_metadata if i == 0 else {}) + yield ChatGenerationChunk(message=chunk_msg, generation_info=gen.generation_info if i == 0 else None) + + if standard_tool_calls: + yield ChatGenerationChunk(message=AIMessageChunk(content="", id=msg.id, tool_calls=standard_tool_calls, invalid_tool_calls=getattr(msg, "invalid_tool_calls", []))) + else: + chunk_msg = AIMessageChunk(content=content, id=msg.id, tool_calls=standard_tool_calls, invalid_tool_calls=getattr(msg, "invalid_tool_calls", [])) + yield ChatGenerationChunk(message=chunk_msg, generation_info=gen.generation_info) diff --git a/backend/packages/harness/deerflow/persistence/__init__.py b/backend/packages/harness/deerflow/persistence/__init__.py new file mode 100644 index 000000000..dfd64be95 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/__init__.py @@ -0,0 +1,13 @@ +"""DeerFlow application persistence layer (SQLAlchemy 2.0 async ORM). + +This module manages DeerFlow's own application data -- runs metadata, +thread ownership, cron jobs, users. It is completely separate from +LangGraph's checkpointer, which manages graph execution state. + +Usage: + from deerflow.persistence import init_engine, close_engine, get_session_factory +""" + +from deerflow.persistence.engine import close_engine, get_engine, get_session_factory, init_engine + +__all__ = ["close_engine", "get_engine", "get_session_factory", "init_engine"] diff --git a/backend/packages/harness/deerflow/persistence/base.py b/backend/packages/harness/deerflow/persistence/base.py new file mode 100644 index 000000000..fd99d5f74 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/base.py @@ -0,0 +1,40 @@ +"""SQLAlchemy declarative base with automatic to_dict support. + +All DeerFlow ORM models inherit from this Base. It provides a generic +to_dict() method via SQLAlchemy's inspect() so individual models don't +need to write their own serialization logic. + +LangGraph's checkpointer tables are NOT managed by this Base. +""" + +from __future__ import annotations + +from sqlalchemy import inspect as sa_inspect +from sqlalchemy.orm import DeclarativeBase + + +class Base(DeclarativeBase): + """Base class for all DeerFlow ORM models. + + Provides: + - Automatic to_dict() via SQLAlchemy column inspection. + - Standard __repr__() showing all column values. + """ + + def to_dict(self, *, exclude: set[str] | None = None) -> dict: + """Convert ORM instance to plain dict. + + Uses SQLAlchemy's inspect() to iterate mapped column attributes. + + Args: + exclude: Optional set of column keys to omit. + + Returns: + Dict of {column_key: value} for all mapped columns. 
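+
+        Example (illustrative; ``user_row`` is any mapped instance, e.g. the
+        ``UserRow`` model added later in this change)::
+
+            user_row.to_dict(exclude={"password_hash"})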
+ """ + exclude = exclude or set() + return {c.key: getattr(self, c.key) for c in sa_inspect(type(self)).mapper.column_attrs if c.key not in exclude} + + def __repr__(self) -> str: + cols = ", ".join(f"{c.key}={getattr(self, c.key)!r}" for c in sa_inspect(type(self)).mapper.column_attrs) + return f"{type(self).__name__}({cols})" diff --git a/backend/packages/harness/deerflow/persistence/engine.py b/backend/packages/harness/deerflow/persistence/engine.py new file mode 100644 index 000000000..2777c2450 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/engine.py @@ -0,0 +1,190 @@ +"""Async SQLAlchemy engine lifecycle management. + +Initializes at Gateway startup, provides session factory for +repositories, disposes at shutdown. + +When database.backend="memory", init_engine is a no-op and +get_session_factory() returns None. Repositories must check for +None and fall back to in-memory implementations. +""" + +from __future__ import annotations + +import json +import logging + +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine + + +def _json_serializer(obj: object) -> str: + """JSON serializer with ensure_ascii=False for Chinese character support.""" + return json.dumps(obj, ensure_ascii=False) + + +logger = logging.getLogger(__name__) + +_engine: AsyncEngine | None = None +_session_factory: async_sessionmaker[AsyncSession] | None = None + + +async def _auto_create_postgres_db(url: str) -> None: + """Connect to the ``postgres`` maintenance DB and CREATE DATABASE. + + The target database name is extracted from *url*. The connection is + made to the default ``postgres`` database on the same server using + ``AUTOCOMMIT`` isolation (CREATE DATABASE cannot run inside a + transaction). + """ + from sqlalchemy import text + from sqlalchemy.engine.url import make_url + + parsed = make_url(url) + db_name = parsed.database + if not db_name: + raise ValueError("Cannot auto-create database: no database name in URL") + + # Connect to the default 'postgres' database to issue CREATE DATABASE + maint_url = parsed.set(database="postgres") + maint_engine = create_async_engine(maint_url, isolation_level="AUTOCOMMIT") + try: + async with maint_engine.connect() as conn: + await conn.execute(text(f'CREATE DATABASE "{db_name}"')) + logger.info("Auto-created PostgreSQL database: %s", db_name) + finally: + await maint_engine.dispose() + + +async def init_engine( + backend: str, + *, + url: str = "", + echo: bool = False, + pool_size: int = 5, + sqlite_dir: str = "", +) -> None: + """Create the async engine and session factory, then auto-create tables. + + Args: + backend: "memory", "sqlite", or "postgres". + url: SQLAlchemy async URL (for sqlite/postgres). + echo: Echo SQL to log. + pool_size: Postgres connection pool size. + sqlite_dir: Directory to create for SQLite (ensured to exist). 
+ """ + global _engine, _session_factory + + if backend == "memory": + logger.info("Persistence backend=memory -- ORM engine not initialized") + return + + if backend == "postgres": + try: + import asyncpg # noqa: F401 + except ImportError: + raise ImportError("database.backend is set to 'postgres' but asyncpg is not installed.\nInstall it with:\n uv sync --extra postgres\nOr switch to backend: sqlite in config.yaml for single-node deployment.") from None + + if backend == "sqlite": + import os + + from sqlalchemy import event + + os.makedirs(sqlite_dir or ".", exist_ok=True) + _engine = create_async_engine(url, echo=echo, json_serializer=_json_serializer) + + # Enable WAL on every new connection. SQLite PRAGMA settings are + # per-connection, so we wire the listener instead of running PRAGMA + # once at startup. WAL gives concurrent reads + writers without + # blocking and is the standard recommendation for any production + # SQLite deployment (TC-UPG-06 in AUTH_TEST_PLAN.md). The companion + # ``synchronous=NORMAL`` is the safe-and-fast pairing — fsync only + # at WAL checkpoint boundaries instead of every commit. + # Note: we do not set PRAGMA busy_timeout here — Python's sqlite3 + # driver already defaults to a 5-second busy timeout (see the + # ``timeout`` kwarg of ``sqlite3.connect``), and aiosqlite / + # SQLAlchemy's aiosqlite dialect inherit that default. Setting + # it again would be a no-op. + @event.listens_for(_engine.sync_engine, "connect") + def _enable_sqlite_wal(dbapi_conn, _record): # noqa: ARG001 — SQLAlchemy contract + cursor = dbapi_conn.cursor() + try: + cursor.execute("PRAGMA journal_mode=WAL;") + cursor.execute("PRAGMA synchronous=NORMAL;") + cursor.execute("PRAGMA foreign_keys=ON;") + finally: + cursor.close() + elif backend == "postgres": + _engine = create_async_engine( + url, + echo=echo, + pool_size=pool_size, + pool_pre_ping=True, + json_serializer=_json_serializer, + ) + else: + raise ValueError(f"Unknown persistence backend: {backend!r}") + + _session_factory = async_sessionmaker(_engine, expire_on_commit=False) + + # Auto-create tables (dev convenience). Production should use Alembic. + from deerflow.persistence.base import Base + + # Import all models so Base.metadata discovers them. + # When no models exist yet (scaffolding phase), this is a no-op. + try: + import deerflow.persistence.models # noqa: F401 + except ImportError: + # Models package not yet available — tables won't be auto-created. + # This is expected during initial scaffolding or minimal installs. + logger.debug("deerflow.persistence.models not found; skipping auto-create tables") + + try: + async with _engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + except Exception as exc: + if backend == "postgres" and "does not exist" in str(exc): + # Database not yet created — attempt to auto-create it, then retry. 
+ await _auto_create_postgres_db(url) + # Rebuild engine against the now-existing database + await _engine.dispose() + _engine = create_async_engine(url, echo=echo, pool_size=pool_size, pool_pre_ping=True, json_serializer=_json_serializer) + _session_factory = async_sessionmaker(_engine, expire_on_commit=False) + async with _engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + else: + raise + + logger.info("Persistence engine initialized: backend=%s", backend) + + +async def init_engine_from_config(config) -> None: + """Convenience: init engine from a DatabaseConfig object.""" + if config.backend == "memory": + await init_engine("memory") + return + await init_engine( + backend=config.backend, + url=config.app_sqlalchemy_url, + echo=config.echo_sql, + pool_size=config.pool_size, + sqlite_dir=config.sqlite_dir if config.backend == "sqlite" else "", + ) + + +def get_session_factory() -> async_sessionmaker[AsyncSession] | None: + """Return the async session factory, or None if backend=memory.""" + return _session_factory + + +def get_engine() -> AsyncEngine | None: + """Return the async engine, or None if not initialized.""" + return _engine + + +async def close_engine() -> None: + """Dispose the engine, release all connections.""" + global _engine, _session_factory + if _engine is not None: + await _engine.dispose() + logger.info("Persistence engine closed") + _engine = None + _session_factory = None diff --git a/backend/packages/harness/deerflow/persistence/feedback/__init__.py b/backend/packages/harness/deerflow/persistence/feedback/__init__.py new file mode 100644 index 000000000..ee958b027 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/feedback/__init__.py @@ -0,0 +1,6 @@ +"""Feedback persistence — ORM and SQL repository.""" + +from deerflow.persistence.feedback.model import FeedbackRow +from deerflow.persistence.feedback.sql import FeedbackRepository + +__all__ = ["FeedbackRepository", "FeedbackRow"] diff --git a/backend/packages/harness/deerflow/persistence/feedback/model.py b/backend/packages/harness/deerflow/persistence/feedback/model.py new file mode 100644 index 000000000..a9b6479b3 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/feedback/model.py @@ -0,0 +1,32 @@ +"""ORM model for user feedback on runs.""" + +from __future__ import annotations + +from datetime import UTC, datetime + +from sqlalchemy import DateTime, String, Text, UniqueConstraint +from sqlalchemy.orm import Mapped, mapped_column + +from deerflow.persistence.base import Base + + +class FeedbackRow(Base): + __tablename__ = "feedback" + + __table_args__ = (UniqueConstraint("thread_id", "run_id", "user_id", name="uq_feedback_thread_run_user"),) + + feedback_id: Mapped[str] = mapped_column(String(64), primary_key=True) + run_id: Mapped[str] = mapped_column(String(64), nullable=False, index=True) + thread_id: Mapped[str] = mapped_column(String(64), nullable=False, index=True) + user_id: Mapped[str | None] = mapped_column(String(64), index=True) + message_id: Mapped[str | None] = mapped_column(String(64)) + # message_id is an optional RunEventStore event identifier — + # allows feedback to target a specific message or the entire run + + rating: Mapped[int] = mapped_column(nullable=False) + # +1 (thumbs-up) or -1 (thumbs-down) + + comment: Mapped[str | None] = mapped_column(Text) + # Optional text feedback from the user + + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(UTC)) diff --git 
a/backend/packages/harness/deerflow/persistence/feedback/sql.py b/backend/packages/harness/deerflow/persistence/feedback/sql.py new file mode 100644 index 000000000..1db74ce84 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/feedback/sql.py @@ -0,0 +1,217 @@ +"""SQLAlchemy-backed feedback storage. + +Each method acquires its own short-lived session. +""" + +from __future__ import annotations + +import uuid +from datetime import UTC, datetime + +from sqlalchemy import case, func, select +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker + +from deerflow.persistence.feedback.model import FeedbackRow +from deerflow.runtime.user_context import AUTO, _AutoSentinel, resolve_user_id + + +class FeedbackRepository: + def __init__(self, session_factory: async_sessionmaker[AsyncSession]) -> None: + self._sf = session_factory + + @staticmethod + def _row_to_dict(row: FeedbackRow) -> dict: + d = row.to_dict() + val = d.get("created_at") + if isinstance(val, datetime): + d["created_at"] = val.isoformat() + return d + + async def create( + self, + *, + run_id: str, + thread_id: str, + rating: int, + user_id: str | None | _AutoSentinel = AUTO, + message_id: str | None = None, + comment: str | None = None, + ) -> dict: + """Create a feedback record. rating must be +1 or -1.""" + if rating not in (1, -1): + raise ValueError(f"rating must be +1 or -1, got {rating}") + resolved_user_id = resolve_user_id(user_id, method_name="FeedbackRepository.create") + row = FeedbackRow( + feedback_id=str(uuid.uuid4()), + run_id=run_id, + thread_id=thread_id, + user_id=resolved_user_id, + message_id=message_id, + rating=rating, + comment=comment, + created_at=datetime.now(UTC), + ) + async with self._sf() as session: + session.add(row) + await session.commit() + await session.refresh(row) + return self._row_to_dict(row) + + async def get( + self, + feedback_id: str, + *, + user_id: str | None | _AutoSentinel = AUTO, + ) -> dict | None: + resolved_user_id = resolve_user_id(user_id, method_name="FeedbackRepository.get") + async with self._sf() as session: + row = await session.get(FeedbackRow, feedback_id) + if row is None: + return None + if resolved_user_id is not None and row.user_id != resolved_user_id: + return None + return self._row_to_dict(row) + + async def list_by_run( + self, + thread_id: str, + run_id: str, + *, + limit: int = 100, + user_id: str | None | _AutoSentinel = AUTO, + ) -> list[dict]: + resolved_user_id = resolve_user_id(user_id, method_name="FeedbackRepository.list_by_run") + stmt = select(FeedbackRow).where(FeedbackRow.thread_id == thread_id, FeedbackRow.run_id == run_id) + if resolved_user_id is not None: + stmt = stmt.where(FeedbackRow.user_id == resolved_user_id) + stmt = stmt.order_by(FeedbackRow.created_at.asc()).limit(limit) + async with self._sf() as session: + result = await session.execute(stmt) + return [self._row_to_dict(r) for r in result.scalars()] + + async def list_by_thread( + self, + thread_id: str, + *, + limit: int = 100, + user_id: str | None | _AutoSentinel = AUTO, + ) -> list[dict]: + resolved_user_id = resolve_user_id(user_id, method_name="FeedbackRepository.list_by_thread") + stmt = select(FeedbackRow).where(FeedbackRow.thread_id == thread_id) + if resolved_user_id is not None: + stmt = stmt.where(FeedbackRow.user_id == resolved_user_id) + stmt = stmt.order_by(FeedbackRow.created_at.asc()).limit(limit) + async with self._sf() as session: + result = await session.execute(stmt) + return [self._row_to_dict(r) for r in result.scalars()] + + async 
def delete( + self, + feedback_id: str, + *, + user_id: str | None | _AutoSentinel = AUTO, + ) -> bool: + resolved_user_id = resolve_user_id(user_id, method_name="FeedbackRepository.delete") + async with self._sf() as session: + row = await session.get(FeedbackRow, feedback_id) + if row is None: + return False + if resolved_user_id is not None and row.user_id != resolved_user_id: + return False + await session.delete(row) + await session.commit() + return True + + async def upsert( + self, + *, + run_id: str, + thread_id: str, + rating: int, + user_id: str | None | _AutoSentinel = AUTO, + comment: str | None = None, + ) -> dict: + """Create or update feedback for (thread_id, run_id, user_id). rating must be +1 or -1.""" + if rating not in (1, -1): + raise ValueError(f"rating must be +1 or -1, got {rating}") + resolved_user_id = resolve_user_id(user_id, method_name="FeedbackRepository.upsert") + async with self._sf() as session: + stmt = select(FeedbackRow).where( + FeedbackRow.thread_id == thread_id, + FeedbackRow.run_id == run_id, + FeedbackRow.user_id == resolved_user_id, + ) + result = await session.execute(stmt) + row = result.scalar_one_or_none() + if row is not None: + row.rating = rating + row.comment = comment + row.created_at = datetime.now(UTC) + else: + row = FeedbackRow( + feedback_id=str(uuid.uuid4()), + run_id=run_id, + thread_id=thread_id, + user_id=resolved_user_id, + rating=rating, + comment=comment, + created_at=datetime.now(UTC), + ) + session.add(row) + await session.commit() + await session.refresh(row) + return self._row_to_dict(row) + + async def delete_by_run( + self, + *, + thread_id: str, + run_id: str, + user_id: str | None | _AutoSentinel = AUTO, + ) -> bool: + """Delete the current user's feedback for a run. Returns True if a record was deleted.""" + resolved_user_id = resolve_user_id(user_id, method_name="FeedbackRepository.delete_by_run") + async with self._sf() as session: + stmt = select(FeedbackRow).where( + FeedbackRow.thread_id == thread_id, + FeedbackRow.run_id == run_id, + FeedbackRow.user_id == resolved_user_id, + ) + result = await session.execute(stmt) + row = result.scalar_one_or_none() + if row is None: + return False + await session.delete(row) + await session.commit() + return True + + async def list_by_thread_grouped( + self, + thread_id: str, + *, + user_id: str | None | _AutoSentinel = AUTO, + ) -> dict[str, dict]: + """Return feedback grouped by run_id for a thread: {run_id: feedback_dict}.""" + resolved_user_id = resolve_user_id(user_id, method_name="FeedbackRepository.list_by_thread_grouped") + stmt = select(FeedbackRow).where(FeedbackRow.thread_id == thread_id) + if resolved_user_id is not None: + stmt = stmt.where(FeedbackRow.user_id == resolved_user_id) + async with self._sf() as session: + result = await session.execute(stmt) + return {row.run_id: self._row_to_dict(row) for row in result.scalars()} + + async def aggregate_by_run(self, thread_id: str, run_id: str) -> dict: + """Aggregate feedback stats for a run using database-side counting.""" + stmt = select( + func.count().label("total"), + func.coalesce(func.sum(case((FeedbackRow.rating == 1, 1), else_=0)), 0).label("positive"), + func.coalesce(func.sum(case((FeedbackRow.rating == -1, 1), else_=0)), 0).label("negative"), + ).where(FeedbackRow.thread_id == thread_id, FeedbackRow.run_id == run_id) + async with self._sf() as session: + row = (await session.execute(stmt)).one() + return { + "run_id": run_id, + "total": row.total, + "positive": row.positive, + "negative": row.negative, + 
} diff --git a/backend/packages/harness/deerflow/persistence/migrations/alembic.ini b/backend/packages/harness/deerflow/persistence/migrations/alembic.ini new file mode 100644 index 000000000..71b4b1dc0 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/migrations/alembic.ini @@ -0,0 +1,38 @@ +[alembic] +script_location = %(here)s +# Default URL for offline mode / autogenerate. +# Runtime uses engine from DeerFlow config. +sqlalchemy.url = sqlite+aiosqlite:///./data/deerflow.db + +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/packages/harness/deerflow/persistence/migrations/env.py b/backend/packages/harness/deerflow/persistence/migrations/env.py new file mode 100644 index 000000000..22d053ee7 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/migrations/env.py @@ -0,0 +1,67 @@ +"""Alembic environment for DeerFlow application tables. + +ONLY manages DeerFlow's tables (runs, threads_meta, cron_jobs, users). +LangGraph's checkpointer tables are managed by LangGraph itself -- they +have their own schema lifecycle and must not be touched by Alembic. +""" + +from __future__ import annotations + +import asyncio +import logging +from logging.config import fileConfig + +from alembic import context +from sqlalchemy.ext.asyncio import create_async_engine + +from deerflow.persistence.base import Base + +# Import all models so metadata is populated. +try: + import deerflow.persistence.models as models # register ORM models with Base.metadata + + _ = models +except ImportError: + # Models not available — migration will work with existing metadata only. 
+ logging.getLogger(__name__).warning("Could not import deerflow.persistence.models; Alembic may not detect all tables") + +config = context.config +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +target_metadata = Base.metadata + + +def run_migrations_offline() -> None: + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + render_as_batch=True, + ) + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection): + context.configure( + connection=connection, + target_metadata=target_metadata, + render_as_batch=True, # Required for SQLite ALTER TABLE support + ) + with context.begin_transaction(): + context.run_migrations() + + +async def run_migrations_online() -> None: + connectable = create_async_engine(config.get_main_option("sqlalchemy.url")) + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + await connectable.dispose() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + asyncio.run(run_migrations_online()) diff --git a/backend/packages/harness/deerflow/persistence/migrations/versions/.gitkeep b/backend/packages/harness/deerflow/persistence/migrations/versions/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/backend/packages/harness/deerflow/persistence/models/__init__.py b/backend/packages/harness/deerflow/persistence/models/__init__.py new file mode 100644 index 000000000..ab29a3536 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/models/__init__.py @@ -0,0 +1,23 @@ +"""ORM model registration entry point. + +Importing this module ensures all ORM models are registered with +``Base.metadata`` so Alembic autogenerate detects every table. + +The actual ORM classes have moved to entity-specific subpackages: +- ``deerflow.persistence.thread_meta`` +- ``deerflow.persistence.run`` +- ``deerflow.persistence.feedback`` +- ``deerflow.persistence.user`` + +``RunEventRow`` remains in ``deerflow.persistence.models.run_event`` because +its storage implementation lives in ``deerflow.runtime.events.store.db`` and +there is no matching entity directory. +""" + +from deerflow.persistence.feedback.model import FeedbackRow +from deerflow.persistence.models.run_event import RunEventRow +from deerflow.persistence.run.model import RunRow +from deerflow.persistence.thread_meta.model import ThreadMetaRow +from deerflow.persistence.user.model import UserRow + +__all__ = ["FeedbackRow", "RunEventRow", "RunRow", "ThreadMetaRow", "UserRow"] diff --git a/backend/packages/harness/deerflow/persistence/models/run_event.py b/backend/packages/harness/deerflow/persistence/models/run_event.py new file mode 100644 index 000000000..4f22b4616 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/models/run_event.py @@ -0,0 +1,35 @@ +"""ORM model for run events.""" + +from __future__ import annotations + +from datetime import UTC, datetime + +from sqlalchemy import JSON, DateTime, Index, String, Text, UniqueConstraint +from sqlalchemy.orm import Mapped, mapped_column + +from deerflow.persistence.base import Base + + +class RunEventRow(Base): + __tablename__ = "run_events" + + id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True) + thread_id: Mapped[str] = mapped_column(String(64), nullable=False) + run_id: Mapped[str] = mapped_column(String(64), nullable=False) + # Owner of the conversation this event belongs to. 
Nullable for data + # created before auth was introduced; populated by auth middleware on + # new writes and by the boot-time orphan migration on existing rows. + user_id: Mapped[str | None] = mapped_column(String(64), nullable=True, index=True) + event_type: Mapped[str] = mapped_column(String(32), nullable=False) + category: Mapped[str] = mapped_column(String(16), nullable=False) + # "message" | "trace" | "lifecycle" + content: Mapped[str] = mapped_column(Text, default="") + event_metadata: Mapped[dict] = mapped_column(JSON, default=dict) + seq: Mapped[int] = mapped_column(nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(UTC)) + + __table_args__ = ( + UniqueConstraint("thread_id", "seq", name="uq_events_thread_seq"), + Index("ix_events_thread_cat_seq", "thread_id", "category", "seq"), + Index("ix_events_run", "thread_id", "run_id", "seq"), + ) diff --git a/backend/packages/harness/deerflow/persistence/run/__init__.py b/backend/packages/harness/deerflow/persistence/run/__init__.py new file mode 100644 index 000000000..0aa01e7ea --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/run/__init__.py @@ -0,0 +1,6 @@ +"""Run metadata persistence — ORM and SQL repository.""" + +from deerflow.persistence.run.model import RunRow +from deerflow.persistence.run.sql import RunRepository + +__all__ = ["RunRepository", "RunRow"] diff --git a/backend/packages/harness/deerflow/persistence/run/model.py b/backend/packages/harness/deerflow/persistence/run/model.py new file mode 100644 index 000000000..d0dfe4085 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/run/model.py @@ -0,0 +1,49 @@ +"""ORM model for run metadata.""" + +from __future__ import annotations + +from datetime import UTC, datetime + +from sqlalchemy import JSON, DateTime, Index, String, Text +from sqlalchemy.orm import Mapped, mapped_column + +from deerflow.persistence.base import Base + + +class RunRow(Base): + __tablename__ = "runs" + + run_id: Mapped[str] = mapped_column(String(64), primary_key=True) + thread_id: Mapped[str] = mapped_column(String(64), nullable=False, index=True) + assistant_id: Mapped[str | None] = mapped_column(String(128)) + user_id: Mapped[str | None] = mapped_column(String(64), index=True) + status: Mapped[str] = mapped_column(String(20), default="pending") + # "pending" | "running" | "success" | "error" | "timeout" | "interrupted" + + model_name: Mapped[str | None] = mapped_column(String(128)) + multitask_strategy: Mapped[str] = mapped_column(String(20), default="reject") + metadata_json: Mapped[dict] = mapped_column(JSON, default=dict) + kwargs_json: Mapped[dict] = mapped_column(JSON, default=dict) + error: Mapped[str | None] = mapped_column(Text) + + # Convenience fields (for listing pages without querying RunEventStore) + message_count: Mapped[int] = mapped_column(default=0) + first_human_message: Mapped[str | None] = mapped_column(Text) + last_ai_message: Mapped[str | None] = mapped_column(Text) + + # Token usage (accumulated in-memory by RunJournal, written on run completion) + total_input_tokens: Mapped[int] = mapped_column(default=0) + total_output_tokens: Mapped[int] = mapped_column(default=0) + total_tokens: Mapped[int] = mapped_column(default=0) + llm_call_count: Mapped[int] = mapped_column(default=0) + lead_agent_tokens: Mapped[int] = mapped_column(default=0) + subagent_tokens: Mapped[int] = mapped_column(default=0) + middleware_tokens: Mapped[int] = mapped_column(default=0) + + # Follow-up association + 
follow_up_to_run_id: Mapped[str | None] = mapped_column(String(64)) + + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(UTC)) + updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(UTC), onupdate=lambda: datetime.now(UTC)) + + __table_args__ = (Index("ix_runs_thread_status", "thread_id", "status"),) diff --git a/backend/packages/harness/deerflow/persistence/run/sql.py b/backend/packages/harness/deerflow/persistence/run/sql.py new file mode 100644 index 000000000..fcd1a3411 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/run/sql.py @@ -0,0 +1,255 @@ +"""SQLAlchemy-backed RunStore implementation. + +Each method acquires and releases its own short-lived session. +Run status updates happen from background workers that may live +minutes -- we don't hold connections across long execution. +""" + +from __future__ import annotations + +import json +from datetime import UTC, datetime +from typing import Any + +from sqlalchemy import func, select, update +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker + +from deerflow.persistence.run.model import RunRow +from deerflow.runtime.runs.store.base import RunStore +from deerflow.runtime.user_context import AUTO, _AutoSentinel, resolve_user_id + + +class RunRepository(RunStore): + def __init__(self, session_factory: async_sessionmaker[AsyncSession]) -> None: + self._sf = session_factory + + @staticmethod + def _safe_json(obj: Any) -> Any: + """Ensure obj is JSON-serializable. Falls back to model_dump() or str().""" + if obj is None: + return None + if isinstance(obj, (str, int, float, bool)): + return obj + if isinstance(obj, dict): + return {k: RunRepository._safe_json(v) for k, v in obj.items()} + if isinstance(obj, (list, tuple)): + return [RunRepository._safe_json(v) for v in obj] + if hasattr(obj, "model_dump"): + try: + return obj.model_dump() + except Exception: + pass + if hasattr(obj, "dict"): + try: + return obj.dict() + except Exception: + pass + try: + json.dumps(obj) + return obj + except (TypeError, ValueError): + return str(obj) + + @staticmethod + def _row_to_dict(row: RunRow) -> dict[str, Any]: + d = row.to_dict() + # Remap JSON columns to match RunStore interface + d["metadata"] = d.pop("metadata_json", {}) + d["kwargs"] = d.pop("kwargs_json", {}) + # Convert datetime to ISO string for consistency with MemoryRunStore + for key in ("created_at", "updated_at"): + val = d.get(key) + if isinstance(val, datetime): + d[key] = val.isoformat() + return d + + async def put( + self, + run_id, + *, + thread_id, + assistant_id=None, + user_id: str | None | _AutoSentinel = AUTO, + status="pending", + multitask_strategy="reject", + metadata=None, + kwargs=None, + error=None, + created_at=None, + follow_up_to_run_id=None, + ): + resolved_user_id = resolve_user_id(user_id, method_name="RunRepository.put") + now = datetime.now(UTC) + row = RunRow( + run_id=run_id, + thread_id=thread_id, + assistant_id=assistant_id, + user_id=resolved_user_id, + status=status, + multitask_strategy=multitask_strategy, + metadata_json=self._safe_json(metadata) or {}, + kwargs_json=self._safe_json(kwargs) or {}, + error=error, + follow_up_to_run_id=follow_up_to_run_id, + created_at=datetime.fromisoformat(created_at) if created_at else now, + updated_at=now, + ) + async with self._sf() as session: + session.add(row) + await session.commit() + + async def get( + self, + run_id, + *, + user_id: str | None | _AutoSentinel = AUTO, + ): + 
resolved_user_id = resolve_user_id(user_id, method_name="RunRepository.get") + async with self._sf() as session: + row = await session.get(RunRow, run_id) + if row is None: + return None + if resolved_user_id is not None and row.user_id != resolved_user_id: + return None + return self._row_to_dict(row) + + async def list_by_thread( + self, + thread_id, + *, + user_id: str | None | _AutoSentinel = AUTO, + limit=100, + ): + resolved_user_id = resolve_user_id(user_id, method_name="RunRepository.list_by_thread") + stmt = select(RunRow).where(RunRow.thread_id == thread_id) + if resolved_user_id is not None: + stmt = stmt.where(RunRow.user_id == resolved_user_id) + stmt = stmt.order_by(RunRow.created_at.desc()).limit(limit) + async with self._sf() as session: + result = await session.execute(stmt) + return [self._row_to_dict(r) for r in result.scalars()] + + async def update_status(self, run_id, status, *, error=None): + values: dict[str, Any] = {"status": status, "updated_at": datetime.now(UTC)} + if error is not None: + values["error"] = error + async with self._sf() as session: + await session.execute(update(RunRow).where(RunRow.run_id == run_id).values(**values)) + await session.commit() + + async def delete( + self, + run_id, + *, + user_id: str | None | _AutoSentinel = AUTO, + ): + resolved_user_id = resolve_user_id(user_id, method_name="RunRepository.delete") + async with self._sf() as session: + row = await session.get(RunRow, run_id) + if row is None: + return + if resolved_user_id is not None and row.user_id != resolved_user_id: + return + await session.delete(row) + await session.commit() + + async def list_pending(self, *, before=None): + if before is None: + before_dt = datetime.now(UTC) + elif isinstance(before, datetime): + before_dt = before + else: + before_dt = datetime.fromisoformat(before) + stmt = select(RunRow).where(RunRow.status == "pending", RunRow.created_at <= before_dt).order_by(RunRow.created_at.asc()) + async with self._sf() as session: + result = await session.execute(stmt) + return [self._row_to_dict(r) for r in result.scalars()] + + async def update_run_completion( + self, + run_id: str, + *, + status: str, + total_input_tokens: int = 0, + total_output_tokens: int = 0, + total_tokens: int = 0, + llm_call_count: int = 0, + lead_agent_tokens: int = 0, + subagent_tokens: int = 0, + middleware_tokens: int = 0, + message_count: int = 0, + last_ai_message: str | None = None, + first_human_message: str | None = None, + error: str | None = None, + ) -> None: + """Update status + token usage + convenience fields on run completion.""" + values: dict[str, Any] = { + "status": status, + "total_input_tokens": total_input_tokens, + "total_output_tokens": total_output_tokens, + "total_tokens": total_tokens, + "llm_call_count": llm_call_count, + "lead_agent_tokens": lead_agent_tokens, + "subagent_tokens": subagent_tokens, + "middleware_tokens": middleware_tokens, + "message_count": message_count, + "updated_at": datetime.now(UTC), + } + if last_ai_message is not None: + values["last_ai_message"] = last_ai_message[:2000] + if first_human_message is not None: + values["first_human_message"] = first_human_message[:2000] + if error is not None: + values["error"] = error + async with self._sf() as session: + await session.execute(update(RunRow).where(RunRow.run_id == run_id).values(**values)) + await session.commit() + + async def aggregate_tokens_by_thread(self, thread_id: str) -> dict[str, Any]: + """Aggregate token usage via a single SQL GROUP BY query.""" + _completed = 
RunRow.status.in_(("success", "error")) + _thread = RunRow.thread_id == thread_id + + stmt = ( + select( + func.coalesce(RunRow.model_name, "unknown").label("model"), + func.count().label("runs"), + func.coalesce(func.sum(RunRow.total_tokens), 0).label("total_tokens"), + func.coalesce(func.sum(RunRow.total_input_tokens), 0).label("total_input_tokens"), + func.coalesce(func.sum(RunRow.total_output_tokens), 0).label("total_output_tokens"), + func.coalesce(func.sum(RunRow.lead_agent_tokens), 0).label("lead_agent"), + func.coalesce(func.sum(RunRow.subagent_tokens), 0).label("subagent"), + func.coalesce(func.sum(RunRow.middleware_tokens), 0).label("middleware"), + ) + .where(_thread, _completed) + .group_by(func.coalesce(RunRow.model_name, "unknown")) + ) + + async with self._sf() as session: + rows = (await session.execute(stmt)).all() + + total_tokens = total_input = total_output = total_runs = 0 + lead_agent = subagent = middleware = 0 + by_model: dict[str, dict] = {} + for r in rows: + by_model[r.model] = {"tokens": r.total_tokens, "runs": r.runs} + total_tokens += r.total_tokens + total_input += r.total_input_tokens + total_output += r.total_output_tokens + total_runs += r.runs + lead_agent += r.lead_agent + subagent += r.subagent + middleware += r.middleware + + return { + "total_tokens": total_tokens, + "total_input_tokens": total_input, + "total_output_tokens": total_output, + "total_runs": total_runs, + "by_model": by_model, + "by_caller": { + "lead_agent": lead_agent, + "subagent": subagent, + "middleware": middleware, + }, + } diff --git a/backend/packages/harness/deerflow/persistence/thread_meta/__init__.py b/backend/packages/harness/deerflow/persistence/thread_meta/__init__.py new file mode 100644 index 000000000..080ce8093 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/thread_meta/__init__.py @@ -0,0 +1,38 @@ +"""Thread metadata persistence — ORM, abstract store, and concrete implementations.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from deerflow.persistence.thread_meta.base import ThreadMetaStore +from deerflow.persistence.thread_meta.memory import MemoryThreadMetaStore +from deerflow.persistence.thread_meta.model import ThreadMetaRow +from deerflow.persistence.thread_meta.sql import ThreadMetaRepository + +if TYPE_CHECKING: + from langgraph.store.base import BaseStore + from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker + +__all__ = [ + "MemoryThreadMetaStore", + "ThreadMetaRepository", + "ThreadMetaRow", + "ThreadMetaStore", + "make_thread_store", +] + + +def make_thread_store( + session_factory: async_sessionmaker[AsyncSession] | None, + store: BaseStore | None = None, +) -> ThreadMetaStore: + """Create the appropriate ThreadMetaStore based on available backends. + + Returns a SQL-backed repository when a session factory is available, + otherwise falls back to the in-memory LangGraph Store implementation. + """ + if session_factory is not None: + return ThreadMetaRepository(session_factory) + if store is None: + raise ValueError("make_thread_store requires either a session_factory (SQL) or a store (memory)") + return MemoryThreadMetaStore(store) diff --git a/backend/packages/harness/deerflow/persistence/thread_meta/base.py b/backend/packages/harness/deerflow/persistence/thread_meta/base.py new file mode 100644 index 000000000..c87c10a16 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/thread_meta/base.py @@ -0,0 +1,76 @@ +"""Abstract interface for thread metadata storage. 
+ +Implementations: +- ThreadMetaRepository: SQL-backed (sqlite / postgres via SQLAlchemy) +- MemoryThreadMetaStore: wraps LangGraph BaseStore (memory mode) + +All mutating and querying methods accept a ``user_id`` parameter with +three-state semantics (see :mod:`deerflow.runtime.user_context`): + +- ``AUTO`` (default): resolve from the request-scoped contextvar. +- Explicit ``str``: use the provided value verbatim. +- Explicit ``None``: bypass owner filtering (migration/CLI only). +""" + +from __future__ import annotations + +import abc + +from deerflow.runtime.user_context import AUTO, _AutoSentinel + + +class ThreadMetaStore(abc.ABC): + @abc.abstractmethod + async def create( + self, + thread_id: str, + *, + assistant_id: str | None = None, + user_id: str | None | _AutoSentinel = AUTO, + display_name: str | None = None, + metadata: dict | None = None, + ) -> dict: + pass + + @abc.abstractmethod + async def get(self, thread_id: str, *, user_id: str | None | _AutoSentinel = AUTO) -> dict | None: + pass + + @abc.abstractmethod + async def search( + self, + *, + metadata: dict | None = None, + status: str | None = None, + limit: int = 100, + offset: int = 0, + user_id: str | None | _AutoSentinel = AUTO, + ) -> list[dict]: + pass + + @abc.abstractmethod + async def update_display_name(self, thread_id: str, display_name: str, *, user_id: str | None | _AutoSentinel = AUTO) -> None: + pass + + @abc.abstractmethod + async def update_status(self, thread_id: str, status: str, *, user_id: str | None | _AutoSentinel = AUTO) -> None: + pass + + @abc.abstractmethod + async def update_metadata(self, thread_id: str, metadata: dict, *, user_id: str | None | _AutoSentinel = AUTO) -> None: + """Merge ``metadata`` into the thread's metadata field. + + Existing keys are overwritten by the new values; keys absent from + ``metadata`` are preserved. No-op if the thread does not exist + or the owner check fails. + """ + pass + + @abc.abstractmethod + async def check_access(self, thread_id: str, user_id: str, *, require_existing: bool = False) -> bool: + """Check if ``user_id`` has access to ``thread_id``.""" + pass + + @abc.abstractmethod + async def delete(self, thread_id: str, *, user_id: str | None | _AutoSentinel = AUTO) -> None: + pass diff --git a/backend/packages/harness/deerflow/persistence/thread_meta/memory.py b/backend/packages/harness/deerflow/persistence/thread_meta/memory.py new file mode 100644 index 000000000..ccf59ad42 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/thread_meta/memory.py @@ -0,0 +1,149 @@ +"""In-memory ThreadMetaStore backed by LangGraph BaseStore. + +Used when database.backend=memory. Delegates to the LangGraph Store's +``("threads",)`` namespace — the same namespace used by the Gateway +router for thread records. +""" + +from __future__ import annotations + +import time +from typing import Any + +from langgraph.store.base import BaseStore + +from deerflow.persistence.thread_meta.base import ThreadMetaStore +from deerflow.runtime.user_context import AUTO, _AutoSentinel, resolve_user_id + +THREADS_NS: tuple[str, ...] = ("threads",) + + +class MemoryThreadMetaStore(ThreadMetaStore): + def __init__(self, store: BaseStore) -> None: + self._store = store + + async def _get_owned_record( + self, + thread_id: str, + user_id: str | None | _AutoSentinel, + method_name: str, + ) -> dict | None: + """Fetch a record and verify ownership. 
Returns a mutable copy, or None.""" + resolved = resolve_user_id(user_id, method_name=method_name) + item = await self._store.aget(THREADS_NS, thread_id) + if item is None: + return None + record = dict(item.value) + if resolved is not None and record.get("user_id") != resolved: + return None + return record + + async def create( + self, + thread_id: str, + *, + assistant_id: str | None = None, + user_id: str | None | _AutoSentinel = AUTO, + display_name: str | None = None, + metadata: dict | None = None, + ) -> dict: + resolved_user_id = resolve_user_id(user_id, method_name="MemoryThreadMetaStore.create") + now = time.time() + record: dict[str, Any] = { + "thread_id": thread_id, + "assistant_id": assistant_id, + "user_id": resolved_user_id, + "display_name": display_name, + "status": "idle", + "metadata": metadata or {}, + "values": {}, + "created_at": now, + "updated_at": now, + } + await self._store.aput(THREADS_NS, thread_id, record) + return record + + async def get(self, thread_id: str, *, user_id: str | None | _AutoSentinel = AUTO) -> dict | None: + return await self._get_owned_record(thread_id, user_id, "MemoryThreadMetaStore.get") + + async def search( + self, + *, + metadata: dict | None = None, + status: str | None = None, + limit: int = 100, + offset: int = 0, + user_id: str | None | _AutoSentinel = AUTO, + ) -> list[dict]: + resolved_user_id = resolve_user_id(user_id, method_name="MemoryThreadMetaStore.search") + filter_dict: dict[str, Any] = {} + if metadata: + filter_dict.update(metadata) + if status: + filter_dict["status"] = status + if resolved_user_id is not None: + filter_dict["user_id"] = resolved_user_id + + items = await self._store.asearch( + THREADS_NS, + filter=filter_dict or None, + limit=limit, + offset=offset, + ) + return [self._item_to_dict(item) for item in items] + + async def check_access(self, thread_id: str, user_id: str, *, require_existing: bool = False) -> bool: + item = await self._store.aget(THREADS_NS, thread_id) + if item is None: + return not require_existing + record_user_id = item.value.get("user_id") + if record_user_id is None: + return True + return record_user_id == user_id + + async def update_display_name(self, thread_id: str, display_name: str, *, user_id: str | None | _AutoSentinel = AUTO) -> None: + record = await self._get_owned_record(thread_id, user_id, "MemoryThreadMetaStore.update_display_name") + if record is None: + return + record["display_name"] = display_name + record["updated_at"] = time.time() + await self._store.aput(THREADS_NS, thread_id, record) + + async def update_status(self, thread_id: str, status: str, *, user_id: str | None | _AutoSentinel = AUTO) -> None: + record = await self._get_owned_record(thread_id, user_id, "MemoryThreadMetaStore.update_status") + if record is None: + return + record["status"] = status + record["updated_at"] = time.time() + await self._store.aput(THREADS_NS, thread_id, record) + + async def update_metadata(self, thread_id: str, metadata: dict, *, user_id: str | None | _AutoSentinel = AUTO) -> None: + record = await self._get_owned_record(thread_id, user_id, "MemoryThreadMetaStore.update_metadata") + if record is None: + return + merged = dict(record.get("metadata") or {}) + merged.update(metadata) + record["metadata"] = merged + record["updated_at"] = time.time() + await self._store.aput(THREADS_NS, thread_id, record) + + async def delete(self, thread_id: str, *, user_id: str | None | _AutoSentinel = AUTO) -> None: + record = await self._get_owned_record(thread_id, user_id, 
"MemoryThreadMetaStore.delete") + if record is None: + return + await self._store.adelete(THREADS_NS, thread_id) + + @staticmethod + def _item_to_dict(item) -> dict[str, Any]: + """Convert a Store SearchItem to the dict format expected by callers.""" + val = item.value + return { + "thread_id": item.key, + "assistant_id": val.get("assistant_id"), + "user_id": val.get("user_id"), + "display_name": val.get("display_name"), + "status": val.get("status", "idle"), + "metadata": val.get("metadata", {}), + "created_at": str(val.get("created_at", "")), + "updated_at": str(val.get("updated_at", "")), + } diff --git a/backend/packages/harness/deerflow/persistence/thread_meta/model.py b/backend/packages/harness/deerflow/persistence/thread_meta/model.py new file mode 100644 index 000000000..fe15315e1 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/thread_meta/model.py @@ -0,0 +1,23 @@ +"""ORM model for thread metadata.""" + +from __future__ import annotations + +from datetime import UTC, datetime + +from sqlalchemy import JSON, DateTime, String +from sqlalchemy.orm import Mapped, mapped_column + +from deerflow.persistence.base import Base + + +class ThreadMetaRow(Base): + __tablename__ = "threads_meta" + + thread_id: Mapped[str] = mapped_column(String(64), primary_key=True) + assistant_id: Mapped[str | None] = mapped_column(String(128), index=True) + user_id: Mapped[str | None] = mapped_column(String(64), index=True) + display_name: Mapped[str | None] = mapped_column(String(256)) + status: Mapped[str] = mapped_column(String(20), default="idle") + metadata_json: Mapped[dict] = mapped_column(JSON, default=dict) + created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(UTC)) + updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=lambda: datetime.now(UTC), onupdate=lambda: datetime.now(UTC)) diff --git a/backend/packages/harness/deerflow/persistence/thread_meta/sql.py b/backend/packages/harness/deerflow/persistence/thread_meta/sql.py new file mode 100644 index 000000000..688fbb247 --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/thread_meta/sql.py @@ -0,0 +1,217 @@ +"""SQLAlchemy-backed thread metadata repository.""" + +from __future__ import annotations + +from datetime import UTC, datetime +from typing import Any + +from sqlalchemy import select, update +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker + +from deerflow.persistence.thread_meta.base import ThreadMetaStore +from deerflow.persistence.thread_meta.model import ThreadMetaRow +from deerflow.runtime.user_context import AUTO, _AutoSentinel, resolve_user_id + + +class ThreadMetaRepository(ThreadMetaStore): + def __init__(self, session_factory: async_sessionmaker[AsyncSession]) -> None: + self._sf = session_factory + + @staticmethod + def _row_to_dict(row: ThreadMetaRow) -> dict[str, Any]: + d = row.to_dict() + d["metadata"] = d.pop("metadata_json", {}) + for key in ("created_at", "updated_at"): + val = d.get(key) + if isinstance(val, datetime): + d[key] = val.isoformat() + return d + + async def create( + self, + thread_id: str, + *, + assistant_id: str | None = None, + user_id: str | None | _AutoSentinel = AUTO, + display_name: str | None = None, + metadata: dict | None = None, + ) -> dict: + # Auto-resolve user_id from contextvar when AUTO; explicit None + # creates an orphan row (used by migration scripts). 
+ resolved_user_id = resolve_user_id(user_id, method_name="ThreadMetaRepository.create") + now = datetime.now(UTC) + row = ThreadMetaRow( + thread_id=thread_id, + assistant_id=assistant_id, + user_id=resolved_user_id, + display_name=display_name, + metadata_json=metadata or {}, + created_at=now, + updated_at=now, + ) + async with self._sf() as session: + session.add(row) + await session.commit() + await session.refresh(row) + return self._row_to_dict(row) + + async def get( + self, + thread_id: str, + *, + user_id: str | None | _AutoSentinel = AUTO, + ) -> dict | None: + resolved_user_id = resolve_user_id(user_id, method_name="ThreadMetaRepository.get") + async with self._sf() as session: + row = await session.get(ThreadMetaRow, thread_id) + if row is None: + return None + # Enforce owner filter unless explicitly bypassed (user_id=None). + if resolved_user_id is not None and row.user_id != resolved_user_id: + return None + return self._row_to_dict(row) + + async def check_access(self, thread_id: str, user_id: str, *, require_existing: bool = False) -> bool: + """Check if ``user_id`` has access to ``thread_id``. + + Two modes — one row, two distinct semantics depending on what + the caller is about to do: + + - ``require_existing=False`` (default, permissive): + Returns True for: row missing (untracked legacy thread), + ``row.user_id`` is None (shared / pre-auth data), + or ``row.user_id == user_id``. Use for **read-style** + decorators where treating an untracked thread as accessible + preserves backward-compat. + + - ``require_existing=True`` (strict): + Returns True **only** when the row exists AND + (``row.user_id == user_id`` OR ``row.user_id is None``). + Use for **destructive / mutating** decorators (DELETE, PATCH, + state-update) so a thread that has *already been deleted* + cannot be re-targeted by any caller — closing the + delete-idempotence cross-user gap where the row vanishing + made every other user appear to "own" it. + """ + async with self._sf() as session: + row = await session.get(ThreadMetaRow, thread_id) + if row is None: + return not require_existing + if row.user_id is None: + return True + return row.user_id == user_id + + async def search( + self, + *, + metadata: dict | None = None, + status: str | None = None, + limit: int = 100, + offset: int = 0, + user_id: str | None | _AutoSentinel = AUTO, + ) -> list[dict]: + """Search threads with optional metadata and status filters. + + Owner filter is enforced by default: caller must be in a user + context. Pass ``user_id=None`` to bypass (migration/CLI). + """ + resolved_user_id = resolve_user_id(user_id, method_name="ThreadMetaRepository.search") + stmt = select(ThreadMetaRow).order_by(ThreadMetaRow.updated_at.desc()) + if resolved_user_id is not None: + stmt = stmt.where(ThreadMetaRow.user_id == resolved_user_id) + if status: + stmt = stmt.where(ThreadMetaRow.status == status) + + if metadata: + # When metadata filter is active, fetch a larger window and filter + # in Python. TODO(Phase 2): use JSON DB operators (Postgres @>, + # SQLite json_extract) for server-side filtering. 
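+            # A possible Phase-2 shape (assumes the column is JSONB on
+            # Postgres, where ``contains`` renders as ``metadata_json @> :param``):
+            #   stmt = stmt.where(ThreadMetaRow.metadata_json.contains(metadata))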
+ stmt = stmt.limit(limit * 5 + offset) + async with self._sf() as session: + result = await session.execute(stmt) + rows = [self._row_to_dict(r) for r in result.scalars()] + rows = [r for r in rows if all(r.get("metadata", {}).get(k) == v for k, v in metadata.items())] + return rows[offset : offset + limit] + else: + stmt = stmt.limit(limit).offset(offset) + async with self._sf() as session: + result = await session.execute(stmt) + return [self._row_to_dict(r) for r in result.scalars()] + + async def _check_ownership(self, session: AsyncSession, thread_id: str, resolved_user_id: str | None) -> bool: + """Return True if the row exists and is owned (or filter bypassed).""" + if resolved_user_id is None: + return True # explicit bypass + row = await session.get(ThreadMetaRow, thread_id) + return row is not None and row.user_id == resolved_user_id + + async def update_display_name( + self, + thread_id: str, + display_name: str, + *, + user_id: str | None | _AutoSentinel = AUTO, + ) -> None: + """Update the display_name (title) for a thread.""" + resolved_user_id = resolve_user_id(user_id, method_name="ThreadMetaRepository.update_display_name") + async with self._sf() as session: + if not await self._check_ownership(session, thread_id, resolved_user_id): + return + await session.execute(update(ThreadMetaRow).where(ThreadMetaRow.thread_id == thread_id).values(display_name=display_name, updated_at=datetime.now(UTC))) + await session.commit() + + async def update_status( + self, + thread_id: str, + status: str, + *, + user_id: str | None | _AutoSentinel = AUTO, + ) -> None: + resolved_user_id = resolve_user_id(user_id, method_name="ThreadMetaRepository.update_status") + async with self._sf() as session: + if not await self._check_ownership(session, thread_id, resolved_user_id): + return + await session.execute(update(ThreadMetaRow).where(ThreadMetaRow.thread_id == thread_id).values(status=status, updated_at=datetime.now(UTC))) + await session.commit() + + async def update_metadata( + self, + thread_id: str, + metadata: dict, + *, + user_id: str | None | _AutoSentinel = AUTO, + ) -> None: + """Merge ``metadata`` into ``metadata_json``. + + Read-modify-write inside a single session/transaction so concurrent + callers see consistent state. No-op if the row does not exist or + the user_id check fails. 
+ """ + resolved_user_id = resolve_user_id(user_id, method_name="ThreadMetaRepository.update_metadata") + async with self._sf() as session: + row = await session.get(ThreadMetaRow, thread_id) + if row is None: + return + if resolved_user_id is not None and row.user_id != resolved_user_id: + return + merged = dict(row.metadata_json or {}) + merged.update(metadata) + row.metadata_json = merged + row.updated_at = datetime.now(UTC) + await session.commit() + + async def delete( + self, + thread_id: str, + *, + user_id: str | None | _AutoSentinel = AUTO, + ) -> None: + resolved_user_id = resolve_user_id(user_id, method_name="ThreadMetaRepository.delete") + async with self._sf() as session: + row = await session.get(ThreadMetaRow, thread_id) + if row is None: + return + if resolved_user_id is not None and row.user_id != resolved_user_id: + return + await session.delete(row) + await session.commit() diff --git a/backend/packages/harness/deerflow/persistence/user/__init__.py b/backend/packages/harness/deerflow/persistence/user/__init__.py new file mode 100644 index 000000000..a60eeef2c --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/user/__init__.py @@ -0,0 +1,12 @@ +"""User storage subpackage. + +Holds the ORM model for the ``users`` table. The concrete repository +implementation (``SQLiteUserRepository``) lives in the app layer +(``app.gateway.auth.repositories.sqlite``) because it converts +between the ORM row and the auth module's pydantic ``User`` class. +This keeps the harness package free of any dependency on app code. +""" + +from deerflow.persistence.user.model import UserRow + +__all__ = ["UserRow"] diff --git a/backend/packages/harness/deerflow/persistence/user/model.py b/backend/packages/harness/deerflow/persistence/user/model.py new file mode 100644 index 000000000..130d4bfcb --- /dev/null +++ b/backend/packages/harness/deerflow/persistence/user/model.py @@ -0,0 +1,59 @@ +"""ORM model for the users table. + +Lives in the harness persistence package so it is picked up by +``Base.metadata.create_all()`` alongside ``threads_meta``, ``runs``, +``run_events``, and ``feedback``. Using the shared engine means: + +- One SQLite/Postgres database, one connection pool +- One schema initialisation codepath +- Consistent async sessions across auth and persistence reads +""" + +from __future__ import annotations + +from datetime import UTC, datetime + +from sqlalchemy import Boolean, DateTime, Index, String, text +from sqlalchemy.orm import Mapped, mapped_column + +from deerflow.persistence.base import Base + + +class UserRow(Base): + __tablename__ = "users" + + # UUIDs are stored as 36-char strings for cross-backend portability. + id: Mapped[str] = mapped_column(String(36), primary_key=True) + + email: Mapped[str] = mapped_column(String(320), unique=True, nullable=False, index=True) + password_hash: Mapped[str | None] = mapped_column(String(128), nullable=True) + + # "admin" | "user" — kept as plain string to avoid ALTER TABLE pain + # when new roles are introduced. + system_role: Mapped[str] = mapped_column(String(16), nullable=False, default="user") + + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + nullable=False, + default=lambda: datetime.now(UTC), + ) + + # OAuth linkage (optional). A partial unique index enforces one + # account per (provider, oauth_id) pair, leaving NULL/NULL rows + # unconstrained so plain password accounts can coexist. 
+ oauth_provider: Mapped[str | None] = mapped_column(String(32), nullable=True) + oauth_id: Mapped[str | None] = mapped_column(String(128), nullable=True) + + # Auth lifecycle flags + needs_setup: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False) + token_version: Mapped[int] = mapped_column(nullable=False, default=0) + + __table_args__ = ( + Index( + "idx_users_oauth_identity", + "oauth_provider", + "oauth_id", + unique=True, + sqlite_where=text("oauth_provider IS NOT NULL AND oauth_id IS NOT NULL"), + ), + ) diff --git a/backend/packages/harness/deerflow/runtime/__init__.py b/backend/packages/harness/deerflow/runtime/__init__.py index d7eccf101..5a3df2eb6 100644 --- a/backend/packages/harness/deerflow/runtime/__init__.py +++ b/backend/packages/harness/deerflow/runtime/__init__.py @@ -5,15 +5,22 @@ Re-exports the public API of :mod:`~deerflow.runtime.runs` and directly from ``deerflow.runtime``. """ -from .runs import ConflictError, DisconnectMode, RunManager, RunRecord, RunStatus, UnsupportedStrategyError, run_agent +from .checkpointer import checkpointer_context, get_checkpointer, make_checkpointer, reset_checkpointer +from .runs import ConflictError, DisconnectMode, RunContext, RunManager, RunRecord, RunStatus, UnsupportedStrategyError, run_agent from .serialization import serialize, serialize_channel_values, serialize_lc_object, serialize_messages_tuple from .store import get_store, make_store, reset_store, store_context from .stream_bridge import END_SENTINEL, HEARTBEAT_SENTINEL, MemoryStreamBridge, StreamBridge, StreamEvent, make_stream_bridge __all__ = [ + # checkpointer + "checkpointer_context", + "get_checkpointer", + "make_checkpointer", + "reset_checkpointer", # runs "ConflictError", "DisconnectMode", + "RunContext", "RunManager", "RunRecord", "RunStatus", diff --git a/backend/packages/harness/deerflow/agents/checkpointer/__init__.py b/backend/packages/harness/deerflow/runtime/checkpointer/__init__.py similarity index 100% rename from backend/packages/harness/deerflow/agents/checkpointer/__init__.py rename to backend/packages/harness/deerflow/runtime/checkpointer/__init__.py diff --git a/backend/packages/harness/deerflow/agents/checkpointer/async_provider.py b/backend/packages/harness/deerflow/runtime/checkpointer/async_provider.py similarity index 52% rename from backend/packages/harness/deerflow/agents/checkpointer/async_provider.py rename to backend/packages/harness/deerflow/runtime/checkpointer/async_provider.py index 1129fc6b0..9a04cb1af 100644 --- a/backend/packages/harness/deerflow/agents/checkpointer/async_provider.py +++ b/backend/packages/harness/deerflow/runtime/checkpointer/async_provider.py @@ -7,12 +7,12 @@ Supported backends: memory, sqlite, postgres. Usage (e.g. FastAPI lifespan):: - from deerflow.agents.checkpointer.async_provider import make_checkpointer + from deerflow.runtime.checkpointer.async_provider import make_checkpointer async with make_checkpointer() as checkpointer: app.state.checkpointer = checkpointer # InMemorySaver if not configured -For sync usage see :mod:`deerflow.agents.checkpointer.provider`. +For sync usage see :mod:`deerflow.runtime.checkpointer.provider`. 
""" from __future__ import annotations @@ -24,12 +24,12 @@ from collections.abc import AsyncIterator from langgraph.types import Checkpointer -from deerflow.agents.checkpointer.provider import ( +from deerflow.config.app_config import AppConfig, get_app_config +from deerflow.runtime.checkpointer.provider import ( POSTGRES_CONN_REQUIRED, POSTGRES_INSTALL, SQLITE_INSTALL, ) -from deerflow.config.app_config import get_app_config from deerflow.runtime.store._sqlite_utils import ensure_sqlite_parent_dir, resolve_sqlite_conn_str logger = logging.getLogger(__name__) @@ -84,23 +84,77 @@ async def _async_checkpointer(config) -> AsyncIterator[Checkpointer]: @contextlib.asynccontextmanager -async def make_checkpointer() -> AsyncIterator[Checkpointer]: - """Async context manager that yields a checkpointer for the caller's lifetime. - Resources are opened on enter and closed on exit — no global state:: - - async with make_checkpointer() as checkpointer: - app.state.checkpointer = checkpointer - - Yields an ``InMemorySaver`` when no checkpointer is configured in *config.yaml*. - """ - - config = get_app_config() - - if config.checkpointer is None: +async def _async_checkpointer_from_database(db_config) -> AsyncIterator[Checkpointer]: + """Async context manager that constructs a checkpointer from unified DatabaseConfig.""" + if db_config.backend == "memory": from langgraph.checkpoint.memory import InMemorySaver yield InMemorySaver() return - async with _async_checkpointer(config.checkpointer) as saver: - yield saver + if db_config.backend == "sqlite": + try: + from langgraph.checkpoint.sqlite.aio import AsyncSqliteSaver + except ImportError as exc: + raise ImportError(SQLITE_INSTALL) from exc + + conn_str = db_config.checkpointer_sqlite_path + ensure_sqlite_parent_dir(conn_str) + async with AsyncSqliteSaver.from_conn_string(conn_str) as saver: + await saver.setup() + yield saver + return + + if db_config.backend == "postgres": + try: + from langgraph.checkpoint.postgres.aio import AsyncPostgresSaver + except ImportError as exc: + raise ImportError(POSTGRES_INSTALL) from exc + + if not db_config.postgres_url: + raise ValueError("database.postgres_url is required for the postgres backend") + + async with AsyncPostgresSaver.from_conn_string(db_config.postgres_url) as saver: + await saver.setup() + yield saver + return + + raise ValueError(f"Unknown database backend: {db_config.backend!r}") + + +@contextlib.asynccontextmanager +async def make_checkpointer(app_config: AppConfig | None = None) -> AsyncIterator[Checkpointer]: + """Async context manager that yields a checkpointer for the caller's lifetime. + Resources are opened on enter and closed on exit -- no global state:: + + async with make_checkpointer(app_config) as checkpointer: + app.state.checkpointer = checkpointer + + Yields an ``InMemorySaver`` when no checkpointer is configured in *config.yaml*. + + Priority: + 1. Legacy ``checkpointer:`` config section (backward compatible) + 2. Unified ``database:`` config section + 3. 
Default InMemorySaver + """ + + if app_config is None: + app_config = get_app_config() + + # Legacy: standalone checkpointer config takes precedence + if app_config.checkpointer is not None: + async with _async_checkpointer(app_config.checkpointer) as saver: + yield saver + return + + # Unified database config + db_config = getattr(app_config, "database", None) + if db_config is not None and db_config.backend != "memory": + async with _async_checkpointer_from_database(db_config) as saver: + yield saver + return + + # Default: in-memory + from langgraph.checkpoint.memory import InMemorySaver + + yield InMemorySaver() diff --git a/backend/packages/harness/deerflow/agents/checkpointer/provider.py b/backend/packages/harness/deerflow/runtime/checkpointer/provider.py similarity index 98% rename from backend/packages/harness/deerflow/agents/checkpointer/provider.py rename to backend/packages/harness/deerflow/runtime/checkpointer/provider.py index 252e58be5..5ee66be83 100644 --- a/backend/packages/harness/deerflow/agents/checkpointer/provider.py +++ b/backend/packages/harness/deerflow/runtime/checkpointer/provider.py @@ -7,7 +7,7 @@ Supported backends: memory, sqlite, postgres. Usage:: - from deerflow.agents.checkpointer.provider import get_checkpointer, checkpointer_context + from deerflow.runtime.checkpointer.provider import get_checkpointer, checkpointer_context # Singleton — reused across calls, closed on process exit cp = get_checkpointer() diff --git a/backend/packages/harness/deerflow/runtime/converters.py b/backend/packages/harness/deerflow/runtime/converters.py new file mode 100644 index 000000000..79d3b2b84 --- /dev/null +++ b/backend/packages/harness/deerflow/runtime/converters.py @@ -0,0 +1,136 @@ +"""Pure functions to convert LangChain message objects to OpenAI Chat Completions format. + +Utility for translating LangChain message types to OpenAI-compatible dicts. +Not currently wired into RunJournal (which uses message.model_dump() directly), +but available for consumers that need the OpenAI wire format. +""" + +from __future__ import annotations + +import json +from typing import Any + +_ROLE_MAP = { + "human": "user", + "ai": "assistant", + "system": "system", + "tool": "tool", +} + + +def langchain_to_openai_message(message: Any) -> dict: + """Convert a single LangChain BaseMessage to an OpenAI message dict. 
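+
+    A hedged example of the mapping (message construction is illustrative)::
+
+        from langchain_core.messages import HumanMessage
+
+        langchain_to_openai_message(HumanMessage(content="hi"))
+        # -> {"role": "user", "content": "hi"}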
+ + Handles: + - HumanMessage → {"role": "user", "content": "..."} + - AIMessage (text only) → {"role": "assistant", "content": "..."} + - AIMessage (with tool_calls) → {"role": "assistant", "content": null, "tool_calls": [...]} + - AIMessage (text + tool_calls) → both content and tool_calls present + - AIMessage (list content / multimodal) → content preserved as list + - SystemMessage → {"role": "system", "content": "..."} + - ToolMessage → {"role": "tool", "tool_call_id": "...", "content": "..."} + """ + msg_type = getattr(message, "type", "") + role = _ROLE_MAP.get(msg_type, msg_type) + content = getattr(message, "content", "") + + if role == "tool": + return { + "role": "tool", + "tool_call_id": getattr(message, "tool_call_id", ""), + "content": content, + } + + if role == "assistant": + tool_calls = getattr(message, "tool_calls", None) or [] + result: dict = {"role": "assistant"} + + if tool_calls: + openai_tool_calls = [] + for tc in tool_calls: + args = tc.get("args", {}) + openai_tool_calls.append( + { + "id": tc.get("id", ""), + "type": "function", + "function": { + "name": tc.get("name", ""), + "arguments": json.dumps(args) if not isinstance(args, str) else args, + }, + } + ) + # If no text content, set content to null per OpenAI spec + result["content"] = content if (isinstance(content, list) and content) or (isinstance(content, str) and content) else None + result["tool_calls"] = openai_tool_calls + else: + result["content"] = content + + return result + + # user / system / unknown + return {"role": role, "content": content} + + +def _infer_finish_reason(message: Any) -> str: + """Infer OpenAI finish_reason from an AIMessage. + + Returns "tool_calls" if tool_calls present, else looks in + response_metadata.finish_reason, else returns "stop". + """ + tool_calls = getattr(message, "tool_calls", None) or [] + if tool_calls: + return "tool_calls" + resp_meta = getattr(message, "response_metadata", None) or {} + if isinstance(resp_meta, dict): + finish = resp_meta.get("finish_reason") + if finish: + return finish + return "stop" + + +def langchain_to_openai_completion(message: Any) -> dict: + """Convert an AIMessage and its metadata to an OpenAI completion response dict. 
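+
+    A hedged usage sketch (the message below is illustrative)::
+
+        from langchain_core.messages import AIMessage
+
+        completion = langchain_to_openai_completion(AIMessage(content="done"))
+        completion["choices"][0]["finish_reason"]  # "stop" (no tool_calls present)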
+
+    Returns:
+        {
+            "id": message.id,
+            "model": message.response_metadata.get("model_name"),
+            "choices": [{"index": 0, "message": {...}, "finish_reason": "..."}],
+            "usage": {"prompt_tokens": ..., "completion_tokens": ..., "total_tokens": ...} or None,
+        }
+    """
+    resp_meta = getattr(message, "response_metadata", None) or {}
+    model_name = resp_meta.get("model_name") if isinstance(resp_meta, dict) else None
+
+    openai_msg = langchain_to_openai_message(message)
+    finish_reason = _infer_finish_reason(message)
+
+    usage_metadata = getattr(message, "usage_metadata", None)
+    if usage_metadata is not None:
+        input_tokens = usage_metadata.get("input_tokens", 0) or 0
+        output_tokens = usage_metadata.get("output_tokens", 0) or 0
+        usage: dict | None = {
+            "prompt_tokens": input_tokens,
+            "completion_tokens": output_tokens,
+            "total_tokens": input_tokens + output_tokens,
+        }
+    else:
+        usage = None
+
+    return {
+        "id": getattr(message, "id", None),
+        "model": model_name,
+        "choices": [
+            {
+                "index": 0,
+                "message": openai_msg,
+                "finish_reason": finish_reason,
+            }
+        ],
+        "usage": usage,
+    }
+
+
+def langchain_messages_to_openai(messages: list) -> list[dict]:
+    """Convert a list of LangChain BaseMessages to OpenAI message dicts."""
+    return [langchain_to_openai_message(m) for m in messages]
diff --git a/backend/packages/harness/deerflow/runtime/events/__init__.py b/backend/packages/harness/deerflow/runtime/events/__init__.py
new file mode 100644
index 000000000..0da8fabe5
--- /dev/null
+++ b/backend/packages/harness/deerflow/runtime/events/__init__.py
@@ -0,0 +1,4 @@
+from deerflow.runtime.events.store.base import RunEventStore
+from deerflow.runtime.events.store.memory import MemoryRunEventStore
+
+__all__ = ["MemoryRunEventStore", "RunEventStore"]
diff --git a/backend/packages/harness/deerflow/runtime/events/store/__init__.py b/backend/packages/harness/deerflow/runtime/events/store/__init__.py
new file mode 100644
index 000000000..55f0dd33f
--- /dev/null
+++ b/backend/packages/harness/deerflow/runtime/events/store/__init__.py
@@ -0,0 +1,26 @@
+from deerflow.runtime.events.store.base import RunEventStore
+from deerflow.runtime.events.store.memory import MemoryRunEventStore
+
+
+def make_run_event_store(config=None) -> RunEventStore:
+    """Create a RunEventStore based on run_events.backend configuration."""
+    if config is None or config.backend == "memory":
+        return MemoryRunEventStore()
+    if config.backend == "db":
+        from deerflow.persistence.engine import get_session_factory
+
+        sf = get_session_factory()
+        if sf is None:
+            # database.backend=memory but run_events.backend=db -> fallback
+            return MemoryRunEventStore()
+        from deerflow.runtime.events.store.db import DbRunEventStore
+
+        return DbRunEventStore(sf, max_trace_content=config.max_trace_content)
+    if config.backend == "jsonl":
+        from deerflow.runtime.events.store.jsonl import JsonlRunEventStore
+
+        return JsonlRunEventStore()
+    raise ValueError(f"Unknown run_events backend: {config.backend!r}")
+
+
+__all__ = ["MemoryRunEventStore", "RunEventStore", "make_run_event_store"]
diff --git a/backend/packages/harness/deerflow/runtime/events/store/base.py b/backend/packages/harness/deerflow/runtime/events/store/base.py
new file mode 100644
index 000000000..df5136ba5
--- /dev/null
+++ b/backend/packages/harness/deerflow/runtime/events/store/base.py
@@ -0,0 +1,109 @@
+"""Abstract interface for run event storage.
+
+RunEventStore is the unified storage interface for run event streams.
+Messages (frontend display) and execution traces (debugging/audit) go
+through the same interface, distinguished by the ``category`` field.
+
+Implementations:
+- MemoryRunEventStore: in-memory dict (development, tests)
+- DbRunEventStore: SQLAlchemy ORM; JsonlRunEventStore: JSONL file store
+"""
+
+from __future__ import annotations
+
+import abc
+
+
+class RunEventStore(abc.ABC):
+    """Run event stream storage interface.
+
+    All implementations must guarantee:
+    1. put() events are retrievable in subsequent queries
+    2. seq is strictly increasing within the same thread
+    3. list_messages() only returns category="message" events
+    4. list_events() returns all events for the specified run
+    5. Returned dicts match the RunEvent field structure
+    """
+
+    @abc.abstractmethod
+    async def put(
+        self,
+        *,
+        thread_id: str,
+        run_id: str,
+        event_type: str,
+        category: str,
+        content: str | dict = "",
+        metadata: dict | None = None,
+        created_at: str | None = None,
+    ) -> dict:
+        """Write an event, auto-assign seq, return the complete record."""
+
+    @abc.abstractmethod
+    async def put_batch(self, events: list[dict]) -> list[dict]:
+        """Batch-write events. Used by RunJournal flush buffer.
+
+        Each dict's keys match put()'s keyword arguments.
+        Returns complete records with seq assigned.
+        """
+
+    @abc.abstractmethod
+    async def list_messages(
+        self,
+        thread_id: str,
+        *,
+        limit: int = 50,
+        before_seq: int | None = None,
+        after_seq: int | None = None,
+    ) -> list[dict]:
+        """Return displayable messages (category=message) for a thread, ordered by seq ascending.
+
+        Supports bidirectional cursor pagination:
+        - before_seq: return the last ``limit`` records with seq < before_seq (ascending)
+        - after_seq: return the first ``limit`` records with seq > after_seq (ascending)
+        - neither: return the latest ``limit`` records (ascending)
+        """
+
+    @abc.abstractmethod
+    async def list_events(
+        self,
+        thread_id: str,
+        run_id: str,
+        *,
+        event_types: list[str] | None = None,
+        limit: int = 500,
+    ) -> list[dict]:
+        """Return the full event stream for a run, ordered by seq ascending.
+
+        Optionally filter by event_types.
+        """
+
+    @abc.abstractmethod
+    async def list_messages_by_run(
+        self,
+        thread_id: str,
+        run_id: str,
+        *,
+        limit: int = 50,
+        before_seq: int | None = None,
+        after_seq: int | None = None,
+    ) -> list[dict]:
+        """Return displayable messages (category=message) for a specific run, ordered by seq ascending.
+
+        Supports bidirectional cursor pagination:
+        - after_seq: return the first ``limit`` records with seq > after_seq (ascending)
+        - before_seq: return the last ``limit`` records with seq < before_seq (ascending)
+        - neither: return the latest ``limit`` records (ascending)
+        """
+
+    @abc.abstractmethod
+    async def count_messages(self, thread_id: str) -> int:
+        """Count displayable messages (category=message) in a thread."""
+
+    @abc.abstractmethod
+    async def delete_by_thread(self, thread_id: str) -> int:
+        """Delete all events for a thread. Return the number of deleted events."""
+
+    @abc.abstractmethod
+    async def delete_by_run(self, thread_id: str, run_id: str) -> int:
+        """Delete all events for a specific run. Return the number of deleted events."""
diff --git a/backend/packages/harness/deerflow/runtime/events/store/db.py b/backend/packages/harness/deerflow/runtime/events/store/db.py
new file mode 100644
index 000000000..e4a21d006
--- /dev/null
+++ b/backend/packages/harness/deerflow/runtime/events/store/db.py
@@ -0,0 +1,286 @@
+"""SQLAlchemy-backed RunEventStore implementation.
+ +Persists events to the ``run_events`` table. Trace content is truncated +at ``max_trace_content`` bytes to avoid bloating the database. +""" + +from __future__ import annotations + +import json +import logging +from datetime import UTC, datetime + +from sqlalchemy import delete, func, select +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker + +from deerflow.persistence.models.run_event import RunEventRow +from deerflow.runtime.events.store.base import RunEventStore +from deerflow.runtime.user_context import AUTO, _AutoSentinel, get_current_user, resolve_user_id + +logger = logging.getLogger(__name__) + + +class DbRunEventStore(RunEventStore): + def __init__(self, session_factory: async_sessionmaker[AsyncSession], *, max_trace_content: int = 10240): + self._sf = session_factory + self._max_trace_content = max_trace_content + + @staticmethod + def _row_to_dict(row: RunEventRow) -> dict: + d = row.to_dict() + d["metadata"] = d.pop("event_metadata", {}) + val = d.get("created_at") + if isinstance(val, datetime): + d["created_at"] = val.isoformat() + d.pop("id", None) + # Restore dict content that was JSON-serialized on write + raw = d.get("content", "") + if isinstance(raw, str) and d.get("metadata", {}).get("content_is_dict"): + try: + d["content"] = json.loads(raw) + except (json.JSONDecodeError, ValueError): + # Content looked like JSON (content_is_dict flag) but failed to parse; + # keep the raw string as-is. + logger.debug("Failed to deserialize content as JSON for event seq=%s", d.get("seq")) + return d + + def _truncate_trace(self, category: str, content: str | dict, metadata: dict | None) -> tuple[str | dict, dict]: + if category == "trace": + text = json.dumps(content, default=str, ensure_ascii=False) if isinstance(content, dict) else content + encoded = text.encode("utf-8") + if len(encoded) > self._max_trace_content: + # Truncate by bytes, then decode back (may cut a multi-byte char, so use errors="ignore") + content = encoded[: self._max_trace_content].decode("utf-8", errors="ignore") + metadata = {**(metadata or {}), "content_truncated": True, "original_byte_length": len(encoded)} + return content, metadata or {} + + @staticmethod + def _user_id_from_context() -> str | None: + """Soft read of user_id from contextvar for write paths. + + Returns ``None`` (no filter / no stamp) if contextvar is unset, + which is the expected case for background worker writes. HTTP + request writes will have the contextvar set by auth middleware + and get their user_id stamped automatically. + + Coerces ``user.id`` to ``str`` at the boundary: ``User.id`` is + typed as ``UUID`` by the auth layer, but ``run_events.user_id`` + is ``VARCHAR(64)`` and aiosqlite cannot bind a raw UUID object + to a VARCHAR column ("type 'UUID' is not supported") — the + INSERT would silently roll back and the worker would hang. + """ + user = get_current_user() + return str(user.id) if user is not None else None + + async def put(self, *, thread_id, run_id, event_type, category, content="", metadata=None, created_at=None): # noqa: D401 + """Write a single event — low-frequency path only. + + This opens a dedicated transaction with a FOR UPDATE lock to + assign a monotonic *seq*. For high-throughput writes use + :meth:`put_batch`, which acquires the lock once for the whole + batch. Currently the only caller is ``worker.run_agent`` for + the initial ``human_message`` event (once per run). 
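+
+        A hedged usage sketch (identifiers are illustrative)::
+
+            record = await store.put(
+                thread_id="thread-1",
+                run_id="run-1",
+                event_type="human_message",
+                category="message",
+                content={"role": "user", "content": "hi"},
+            )
+            record["seq"]  # assigned under the per-thread lock, starts at 1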
+ """ + content, metadata = self._truncate_trace(category, content, metadata) + if isinstance(content, dict): + db_content = json.dumps(content, default=str, ensure_ascii=False) + metadata = {**(metadata or {}), "content_is_dict": True} + else: + db_content = content + user_id = self._user_id_from_context() + async with self._sf() as session: + async with session.begin(): + # Use FOR UPDATE to serialize seq assignment within a thread. + # NOTE: with_for_update() on aggregates is a no-op on SQLite; + # the UNIQUE(thread_id, seq) constraint catches races there. + max_seq = await session.scalar(select(func.max(RunEventRow.seq)).where(RunEventRow.thread_id == thread_id).with_for_update()) + seq = (max_seq or 0) + 1 + row = RunEventRow( + thread_id=thread_id, + run_id=run_id, + user_id=user_id, + event_type=event_type, + category=category, + content=db_content, + event_metadata=metadata, + seq=seq, + created_at=datetime.fromisoformat(created_at) if created_at else datetime.now(UTC), + ) + session.add(row) + return self._row_to_dict(row) + + async def put_batch(self, events): + if not events: + return [] + user_id = self._user_id_from_context() + async with self._sf() as session: + async with session.begin(): + # Get max seq for the thread (assume all events in batch belong to same thread). + # NOTE: with_for_update() on aggregates is a no-op on SQLite; + # the UNIQUE(thread_id, seq) constraint catches races there. + thread_id = events[0]["thread_id"] + max_seq = await session.scalar(select(func.max(RunEventRow.seq)).where(RunEventRow.thread_id == thread_id).with_for_update()) + seq = max_seq or 0 + rows = [] + for e in events: + seq += 1 + content = e.get("content", "") + category = e.get("category", "trace") + metadata = e.get("metadata") + content, metadata = self._truncate_trace(category, content, metadata) + if isinstance(content, dict): + db_content = json.dumps(content, default=str, ensure_ascii=False) + metadata = {**(metadata or {}), "content_is_dict": True} + else: + db_content = content + row = RunEventRow( + thread_id=e["thread_id"], + run_id=e["run_id"], + user_id=e.get("user_id", user_id), + event_type=e["event_type"], + category=category, + content=db_content, + event_metadata=metadata, + seq=seq, + created_at=datetime.fromisoformat(e["created_at"]) if e.get("created_at") else datetime.now(UTC), + ) + session.add(row) + rows.append(row) + return [self._row_to_dict(r) for r in rows] + + async def list_messages( + self, + thread_id, + *, + limit=50, + before_seq=None, + after_seq=None, + user_id: str | None | _AutoSentinel = AUTO, + ): + resolved_user_id = resolve_user_id(user_id, method_name="DbRunEventStore.list_messages") + stmt = select(RunEventRow).where(RunEventRow.thread_id == thread_id, RunEventRow.category == "message") + if resolved_user_id is not None: + stmt = stmt.where(RunEventRow.user_id == resolved_user_id) + if before_seq is not None: + stmt = stmt.where(RunEventRow.seq < before_seq) + if after_seq is not None: + stmt = stmt.where(RunEventRow.seq > after_seq) + + if after_seq is not None: + # Forward pagination: first `limit` records after cursor + stmt = stmt.order_by(RunEventRow.seq.asc()).limit(limit) + async with self._sf() as session: + result = await session.execute(stmt) + return [self._row_to_dict(r) for r in result.scalars()] + else: + # before_seq or default (latest): take last `limit` records, return ascending + stmt = stmt.order_by(RunEventRow.seq.desc()).limit(limit) + async with self._sf() as session: + result = await session.execute(stmt) + rows = 
list(result.scalars()) + return [self._row_to_dict(r) for r in reversed(rows)] + + async def list_events( + self, + thread_id, + run_id, + *, + event_types=None, + limit=500, + user_id: str | None | _AutoSentinel = AUTO, + ): + resolved_user_id = resolve_user_id(user_id, method_name="DbRunEventStore.list_events") + stmt = select(RunEventRow).where(RunEventRow.thread_id == thread_id, RunEventRow.run_id == run_id) + if resolved_user_id is not None: + stmt = stmt.where(RunEventRow.user_id == resolved_user_id) + if event_types: + stmt = stmt.where(RunEventRow.event_type.in_(event_types)) + stmt = stmt.order_by(RunEventRow.seq.asc()).limit(limit) + async with self._sf() as session: + result = await session.execute(stmt) + return [self._row_to_dict(r) for r in result.scalars()] + + async def list_messages_by_run( + self, + thread_id, + run_id, + *, + limit=50, + before_seq=None, + after_seq=None, + user_id: str | None | _AutoSentinel = AUTO, + ): + resolved_user_id = resolve_user_id(user_id, method_name="DbRunEventStore.list_messages_by_run") + stmt = select(RunEventRow).where( + RunEventRow.thread_id == thread_id, + RunEventRow.run_id == run_id, + RunEventRow.category == "message", + ) + if resolved_user_id is not None: + stmt = stmt.where(RunEventRow.user_id == resolved_user_id) + if before_seq is not None: + stmt = stmt.where(RunEventRow.seq < before_seq) + if after_seq is not None: + stmt = stmt.where(RunEventRow.seq > after_seq) + + if after_seq is not None: + stmt = stmt.order_by(RunEventRow.seq.asc()).limit(limit) + async with self._sf() as session: + result = await session.execute(stmt) + return [self._row_to_dict(r) for r in result.scalars()] + else: + stmt = stmt.order_by(RunEventRow.seq.desc()).limit(limit) + async with self._sf() as session: + result = await session.execute(stmt) + rows = list(result.scalars()) + return [self._row_to_dict(r) for r in reversed(rows)] + + async def count_messages( + self, + thread_id, + *, + user_id: str | None | _AutoSentinel = AUTO, + ): + resolved_user_id = resolve_user_id(user_id, method_name="DbRunEventStore.count_messages") + stmt = select(func.count()).select_from(RunEventRow).where(RunEventRow.thread_id == thread_id, RunEventRow.category == "message") + if resolved_user_id is not None: + stmt = stmt.where(RunEventRow.user_id == resolved_user_id) + async with self._sf() as session: + return await session.scalar(stmt) or 0 + + async def delete_by_thread( + self, + thread_id, + *, + user_id: str | None | _AutoSentinel = AUTO, + ): + resolved_user_id = resolve_user_id(user_id, method_name="DbRunEventStore.delete_by_thread") + async with self._sf() as session: + count_conditions = [RunEventRow.thread_id == thread_id] + if resolved_user_id is not None: + count_conditions.append(RunEventRow.user_id == resolved_user_id) + count_stmt = select(func.count()).select_from(RunEventRow).where(*count_conditions) + count = await session.scalar(count_stmt) or 0 + if count > 0: + await session.execute(delete(RunEventRow).where(*count_conditions)) + await session.commit() + return count + + async def delete_by_run( + self, + thread_id, + run_id, + *, + user_id: str | None | _AutoSentinel = AUTO, + ): + resolved_user_id = resolve_user_id(user_id, method_name="DbRunEventStore.delete_by_run") + async with self._sf() as session: + count_conditions = [RunEventRow.thread_id == thread_id, RunEventRow.run_id == run_id] + if resolved_user_id is not None: + count_conditions.append(RunEventRow.user_id == resolved_user_id) + count_stmt = 
select(func.count()).select_from(RunEventRow).where(*count_conditions) + count = await session.scalar(count_stmt) or 0 + if count > 0: + await session.execute(delete(RunEventRow).where(*count_conditions)) + await session.commit() + return count diff --git a/backend/packages/harness/deerflow/runtime/events/store/jsonl.py b/backend/packages/harness/deerflow/runtime/events/store/jsonl.py new file mode 100644 index 000000000..378713afc --- /dev/null +++ b/backend/packages/harness/deerflow/runtime/events/store/jsonl.py @@ -0,0 +1,187 @@ +"""JSONL file-backed RunEventStore implementation. + +Each run's events are stored in a single file: +``.deer-flow/threads/{thread_id}/runs/{run_id}.jsonl`` + +All categories (message, trace, lifecycle) are in the same file. +This backend is suitable for lightweight single-node deployments. + +Known trade-off: ``list_messages()`` must scan all run files for a +thread since messages from multiple runs need unified seq ordering. +``list_events()`` reads only one file -- the fast path. +""" + +from __future__ import annotations + +import json +import logging +import re +from datetime import UTC, datetime +from pathlib import Path + +from deerflow.runtime.events.store.base import RunEventStore + +logger = logging.getLogger(__name__) + +_SAFE_ID_PATTERN = re.compile(r"^[A-Za-z0-9_\-]+$") + + +class JsonlRunEventStore(RunEventStore): + def __init__(self, base_dir: str | Path | None = None): + self._base_dir = Path(base_dir) if base_dir else Path(".deer-flow") + self._seq_counters: dict[str, int] = {} # thread_id -> current max seq + + @staticmethod + def _validate_id(value: str, label: str) -> str: + """Validate that an ID is safe for use in filesystem paths.""" + if not value or not _SAFE_ID_PATTERN.match(value): + raise ValueError(f"Invalid {label}: must be alphanumeric/dash/underscore, got {value!r}") + return value + + def _thread_dir(self, thread_id: str) -> Path: + self._validate_id(thread_id, "thread_id") + return self._base_dir / "threads" / thread_id / "runs" + + def _run_file(self, thread_id: str, run_id: str) -> Path: + self._validate_id(run_id, "run_id") + return self._thread_dir(thread_id) / f"{run_id}.jsonl" + + def _next_seq(self, thread_id: str) -> int: + self._seq_counters[thread_id] = self._seq_counters.get(thread_id, 0) + 1 + return self._seq_counters[thread_id] + + def _ensure_seq_loaded(self, thread_id: str) -> None: + """Load max seq from existing files if not yet cached.""" + if thread_id in self._seq_counters: + return + max_seq = 0 + thread_dir = self._thread_dir(thread_id) + if thread_dir.exists(): + for f in thread_dir.glob("*.jsonl"): + for line in f.read_text(encoding="utf-8").strip().splitlines(): + try: + record = json.loads(line) + max_seq = max(max_seq, record.get("seq", 0)) + except json.JSONDecodeError: + logger.debug("Skipping malformed JSONL line in %s", f) + continue + self._seq_counters[thread_id] = max_seq + + def _write_record(self, record: dict) -> None: + path = self._run_file(record["thread_id"], record["run_id"]) + path.parent.mkdir(parents=True, exist_ok=True) + with open(path, "a", encoding="utf-8") as f: + f.write(json.dumps(record, default=str, ensure_ascii=False) + "\n") + + def _read_thread_events(self, thread_id: str) -> list[dict]: + """Read all events for a thread, sorted by seq.""" + events = [] + thread_dir = self._thread_dir(thread_id) + if not thread_dir.exists(): + return events + for f in sorted(thread_dir.glob("*.jsonl")): + for line in f.read_text(encoding="utf-8").strip().splitlines(): + if not line: + 
continue + try: + events.append(json.loads(line)) + except json.JSONDecodeError: + logger.debug("Skipping malformed JSONL line in %s", f) + continue + events.sort(key=lambda e: e.get("seq", 0)) + return events + + def _read_run_events(self, thread_id: str, run_id: str) -> list[dict]: + """Read events for a specific run file.""" + path = self._run_file(thread_id, run_id) + if not path.exists(): + return [] + events = [] + for line in path.read_text(encoding="utf-8").strip().splitlines(): + if not line: + continue + try: + events.append(json.loads(line)) + except json.JSONDecodeError: + logger.debug("Skipping malformed JSONL line in %s", path) + continue + events.sort(key=lambda e: e.get("seq", 0)) + return events + + async def put(self, *, thread_id, run_id, event_type, category, content="", metadata=None, created_at=None): + self._ensure_seq_loaded(thread_id) + seq = self._next_seq(thread_id) + record = { + "thread_id": thread_id, + "run_id": run_id, + "event_type": event_type, + "category": category, + "content": content, + "metadata": metadata or {}, + "seq": seq, + "created_at": created_at or datetime.now(UTC).isoformat(), + } + self._write_record(record) + return record + + async def put_batch(self, events): + if not events: + return [] + results = [] + for ev in events: + record = await self.put(**ev) + results.append(record) + return results + + async def list_messages(self, thread_id, *, limit=50, before_seq=None, after_seq=None): + all_events = self._read_thread_events(thread_id) + messages = [e for e in all_events if e.get("category") == "message"] + + if before_seq is not None: + messages = [e for e in messages if e["seq"] < before_seq] + return messages[-limit:] + elif after_seq is not None: + messages = [e for e in messages if e["seq"] > after_seq] + return messages[:limit] + else: + return messages[-limit:] + + async def list_events(self, thread_id, run_id, *, event_types=None, limit=500): + events = self._read_run_events(thread_id, run_id) + if event_types is not None: + events = [e for e in events if e.get("event_type") in event_types] + return events[:limit] + + async def list_messages_by_run(self, thread_id, run_id, *, limit=50, before_seq=None, after_seq=None): + events = self._read_run_events(thread_id, run_id) + filtered = [e for e in events if e.get("category") == "message"] + if before_seq is not None: + filtered = [e for e in filtered if e.get("seq", 0) < before_seq] + if after_seq is not None: + filtered = [e for e in filtered if e.get("seq", 0) > after_seq] + if after_seq is not None: + return filtered[:limit] + else: + return filtered[-limit:] if len(filtered) > limit else filtered + + async def count_messages(self, thread_id): + all_events = self._read_thread_events(thread_id) + return sum(1 for e in all_events if e.get("category") == "message") + + async def delete_by_thread(self, thread_id): + all_events = self._read_thread_events(thread_id) + count = len(all_events) + thread_dir = self._thread_dir(thread_id) + if thread_dir.exists(): + for f in thread_dir.glob("*.jsonl"): + f.unlink() + self._seq_counters.pop(thread_id, None) + return count + + async def delete_by_run(self, thread_id, run_id): + events = self._read_run_events(thread_id, run_id) + count = len(events) + path = self._run_file(thread_id, run_id) + if path.exists(): + path.unlink() + return count diff --git a/backend/packages/harness/deerflow/runtime/events/store/memory.py b/backend/packages/harness/deerflow/runtime/events/store/memory.py new file mode 100644 index 000000000..cf70e1cdf --- 
/dev/null +++ b/backend/packages/harness/deerflow/runtime/events/store/memory.py @@ -0,0 +1,128 @@ +"""In-memory RunEventStore. Used when run_events.backend=memory (default) and in tests. + +Thread-safe for single-process async usage (no threading locks needed +since all mutations happen within the same event loop). +""" + +from __future__ import annotations + +from datetime import UTC, datetime + +from deerflow.runtime.events.store.base import RunEventStore + + +class MemoryRunEventStore(RunEventStore): + def __init__(self) -> None: + self._events: dict[str, list[dict]] = {} # thread_id -> sorted event list + self._seq_counters: dict[str, int] = {} # thread_id -> last assigned seq + + def _next_seq(self, thread_id: str) -> int: + current = self._seq_counters.get(thread_id, 0) + next_val = current + 1 + self._seq_counters[thread_id] = next_val + return next_val + + def _put_one( + self, + *, + thread_id: str, + run_id: str, + event_type: str, + category: str, + content: str | dict = "", + metadata: dict | None = None, + created_at: str | None = None, + ) -> dict: + seq = self._next_seq(thread_id) + record = { + "thread_id": thread_id, + "run_id": run_id, + "event_type": event_type, + "category": category, + "content": content, + "metadata": metadata or {}, + "seq": seq, + "created_at": created_at or datetime.now(UTC).isoformat(), + } + self._events.setdefault(thread_id, []).append(record) + return record + + async def put( + self, + *, + thread_id, + run_id, + event_type, + category, + content="", + metadata=None, + created_at=None, + ): + return self._put_one( + thread_id=thread_id, + run_id=run_id, + event_type=event_type, + category=category, + content=content, + metadata=metadata, + created_at=created_at, + ) + + async def put_batch(self, events): + results = [] + for ev in events: + record = self._put_one(**ev) + results.append(record) + return results + + async def list_messages(self, thread_id, *, limit=50, before_seq=None, after_seq=None): + all_events = self._events.get(thread_id, []) + messages = [e for e in all_events if e["category"] == "message"] + + if before_seq is not None: + messages = [e for e in messages if e["seq"] < before_seq] + # Take the last `limit` records + return messages[-limit:] + elif after_seq is not None: + messages = [e for e in messages if e["seq"] > after_seq] + return messages[:limit] + else: + # Return the latest `limit` records, ascending + return messages[-limit:] + + async def list_events(self, thread_id, run_id, *, event_types=None, limit=500): + all_events = self._events.get(thread_id, []) + filtered = [e for e in all_events if e["run_id"] == run_id] + if event_types is not None: + filtered = [e for e in filtered if e["event_type"] in event_types] + return filtered[:limit] + + async def list_messages_by_run(self, thread_id, run_id, *, limit=50, before_seq=None, after_seq=None): + all_events = self._events.get(thread_id, []) + filtered = [e for e in all_events if e["run_id"] == run_id and e["category"] == "message"] + if before_seq is not None: + filtered = [e for e in filtered if e["seq"] < before_seq] + if after_seq is not None: + filtered = [e for e in filtered if e["seq"] > after_seq] + if after_seq is not None: + return filtered[:limit] + else: + return filtered[-limit:] if len(filtered) > limit else filtered + + async def count_messages(self, thread_id): + all_events = self._events.get(thread_id, []) + return sum(1 for e in all_events if e["category"] == "message") + + async def delete_by_thread(self, thread_id): + events = 
self._events.pop(thread_id, [])
+        self._seq_counters.pop(thread_id, None)
+        return len(events)
+
+    async def delete_by_run(self, thread_id, run_id):
+        all_events = self._events.get(thread_id, [])
+        if not all_events:
+            return 0
+        remaining = [e for e in all_events if e["run_id"] != run_id]
+        removed = len(all_events) - len(remaining)
+        self._events[thread_id] = remaining
+        return removed
diff --git a/backend/packages/harness/deerflow/runtime/journal.py b/backend/packages/harness/deerflow/runtime/journal.py
new file mode 100644
index 000000000..a0c2d029b
--- /dev/null
+++ b/backend/packages/harness/deerflow/runtime/journal.py
@@ -0,0 +1,382 @@
+"""Run event capture via LangChain callbacks.
+
+RunJournal sits between LangChain's callback mechanism and the pluggable
+RunEventStore. It standardizes callback data into RunEvent records and
+handles token usage accumulation.
+
+Key design decisions:
+- on_llm_new_token is NOT implemented -- only complete messages via on_llm_end
+- on_chat_model_start extracts the first human message for run.input and emits it
+  as llm.human.input, because it is more reliable than on_chain_start (fires on
+  every node) — messages here are fully structured.
+- on_chain_start with parent_run_id=None emits a run.start trace marking root invocation.
+- on_llm_end emits llm.ai.response message events carrying message.model_dump()
+- Token usage accumulated in memory, written to RunRow on run completion
+- Caller identification via tags injection (lead_agent / subagent:{name} / middleware:{name})
+"""
+
+from __future__ import annotations
+
+import asyncio
+import logging
+import time
+from datetime import UTC, datetime
+from typing import TYPE_CHECKING, Any, cast
+from uuid import UUID
+
+from langchain_core.callbacks import BaseCallbackHandler
+from langchain_core.messages import AnyMessage, BaseMessage, HumanMessage, ToolMessage
+from langgraph.types import Command
+
+if TYPE_CHECKING:
+    from deerflow.runtime.events.store.base import RunEventStore
+
+logger = logging.getLogger(__name__)
+
+
+class RunJournal(BaseCallbackHandler):
+    """LangChain callback handler that captures events to RunEventStore."""
+
+    def __init__(
+        self,
+        run_id: str,
+        thread_id: str,
+        event_store: RunEventStore,
+        *,
+        track_token_usage: bool = True,
+        flush_threshold: int = 20,
+    ):
+        super().__init__()
+        self.run_id = run_id
+        self.thread_id = thread_id
+        self._store = event_store
+        self._track_tokens = track_token_usage
+        self._flush_threshold = flush_threshold
+
+        # Write buffer
+        self._buffer: list[dict] = []
+        self._pending_flush_tasks: set[asyncio.Task[None]] = set()
+
+        # Token accumulators
+        self._total_input_tokens = 0
+        self._total_output_tokens = 0
+        self._total_tokens = 0
+        self._llm_call_count = 0
+
+        # Convenience fields
+        self._last_ai_msg: str | None = None
+        self._first_human_msg: str | None = None
+        self._msg_count = 0
+
+        # Latency tracking
+        self._llm_start_times: dict[str, float] = {}  # langchain run_id -> start time
+
+        # LLM request/response tracking
+        self._llm_call_index = 0
+        self._seen_llm_starts: set[str] = set()  # langchain run_ids that fired on_chat_model_start
+
+    # -- Lifecycle callbacks --
+
+    def on_chain_start(
+        self,
+        serialized: dict[str, Any],
+        inputs: dict[str, Any],
+        *,
+        run_id: UUID,
+        parent_run_id: UUID | None = None,
+        tags: list[str] | None = None,
+        metadata: dict[str, Any] | None = None,
+        **kwargs: Any,
+    ) -> None:
+        caller = self._identify_caller(tags)
+        if parent_run_id is None:
+            # Root graph invocation — emit a
single trace event for the run start. + chain_name = (serialized or {}).get("name", "unknown") + self._put( + event_type="run.start", + category="trace", + content={"chain": chain_name}, + metadata={"caller": caller, **(metadata or {})}, + ) + + def on_chain_end(self, outputs: Any, *, run_id: UUID, **kwargs: Any) -> None: + self._put(event_type="run.end", category="outputs", content=outputs, metadata={"status": "success"}) + self._flush_sync() + + def on_chain_error(self, error: BaseException, *, run_id: UUID, **kwargs: Any) -> None: + self._put( + event_type="run.error", + category="error", + content=str(error), + metadata={"error_type": type(error).__name__}, + ) + self._flush_sync() + + # -- LLM callbacks -- + + def on_chat_model_start( + self, + serialized: dict, + messages: list[list[BaseMessage]], + *, + run_id: UUID, + tags: list[str] | None = None, + **kwargs: Any, + ) -> None: + """Capture structured prompt messages for llm_request event. + + This is also the canonical place to extract the first human message: + messages are fully structured here, it fires only on real LLM calls, + and the content is never compressed by checkpoint trimming. + """ + rid = str(run_id) + self._llm_start_times[rid] = time.monotonic() + self._llm_call_index += 1 + self._seen_llm_starts.add(rid) + + logger.debug( + "on_chat_model_start %s: tags=%s num_batches=%d message_counts=%s", + run_id, + tags, + len(messages), + [len(batch) for batch in messages], + ) + + # Capture the first human message sent to any LLM in this run. + if not self._first_human_msg and messages: + for batch in reversed(messages): + for m in reversed(batch): + if isinstance(m, HumanMessage) and m.name != "summary": + caller = self._identify_caller(tags) + self.set_first_human_message(m.text) + self._put( + event_type="llm.human.input", + category="message", + content=m.model_dump(), + metadata={"caller": caller}, + ) + break + if self._first_human_msg: + break + + def on_llm_start(self, serialized: dict, prompts: list[str], *, run_id: UUID, parent_run_id: UUID | None = None, tags: list[str] | None = None, metadata: dict[str, Any] | None = None, **kwargs: Any) -> None: + # Fallback: on_chat_model_start is preferred. This just tracks latency. 
+ self._llm_start_times[str(run_id)] = time.monotonic() + + def on_llm_end( + self, + response: Any, + *, + run_id: UUID, + parent_run_id: UUID | None = None, + tags: list[str] | None = None, + **kwargs: Any, + ) -> None: + messages: list[AnyMessage] = [] + logger.debug("on_llm_end %s: tags=%s", run_id, tags) + for generation in response.generations: + for gen in generation: + if hasattr(gen, "message"): + messages.append(gen.message) + else: + logger.warning(f"on_llm_end {run_id}: generation has no message attribute: {gen}") + + for message in messages: + caller = self._identify_caller(tags) + + # Latency + rid = str(run_id) + start = self._llm_start_times.pop(rid, None) + latency_ms = int((time.monotonic() - start) * 1000) if start else None + + # Token usage from message + usage = getattr(message, "usage_metadata", None) + usage_dict = dict(usage) if usage else {} + + # Resolve call index + call_index = self._llm_call_index + if rid not in self._seen_llm_starts: + # Fallback: on_chat_model_start was not called + self._llm_call_index += 1 + call_index = self._llm_call_index + self._seen_llm_starts.add(rid) + + # Trace event: llm_response (OpenAI completion format) + self._put( + event_type="llm.ai.response", + category="message", + content=message.model_dump(), + metadata={ + "caller": caller, + "usage": usage_dict, + "latency_ms": latency_ms, + "llm_call_index": call_index, + }, + ) + + # Token accumulation + if self._track_tokens: + input_tk = usage_dict.get("input_tokens", 0) or 0 + output_tk = usage_dict.get("output_tokens", 0) or 0 + total_tk = usage_dict.get("total_tokens", 0) or 0 + if total_tk == 0: + total_tk = input_tk + output_tk + if total_tk > 0: + self._total_input_tokens += input_tk + self._total_output_tokens += output_tk + self._total_tokens += total_tk + self._llm_call_count += 1 + + def on_llm_error(self, error: BaseException, *, run_id: UUID, **kwargs: Any) -> None: + self._llm_start_times.pop(str(run_id), None) + self._put(event_type="llm.error", category="trace", content=str(error)) + + def on_tool_start(self, serialized, input_str, *, run_id, parent_run_id=None, tags=None, metadata=None, inputs=None, **kwargs): + """Handle tool start event, cache tool call ID for later correlation""" + tool_call_id = str(run_id) + logger.debug("Tool start for node %s, tool_call_id=%s, tags=%s", run_id, tool_call_id, tags) + + def on_tool_end(self, output, *, run_id, parent_run_id=None, **kwargs): + """Handle tool end event, append message and clear node data""" + try: + if isinstance(output, ToolMessage): + msg = cast(ToolMessage, output) + self._put(event_type="llm.tool.result", category="message", content=msg.model_dump()) + elif isinstance(output, Command): + cmd = cast(Command, output) + messages = cmd.update.get("messages", []) + for message in messages: + if isinstance(message, BaseMessage): + self._put(event_type="llm.tool.result", category="message", content=message.model_dump()) + else: + logger.warning(f"on_tool_end {run_id}: command update message is not BaseMessage: {type(message)}") + else: + logger.warning(f"on_tool_end {run_id}: output is not ToolMessage: {type(output)}") + finally: + logger.debug("Tool end for node %s", run_id) + + # -- Internal methods -- + + def _put(self, *, event_type: str, category: str, content: str | dict = "", metadata: dict | None = None) -> None: + self._buffer.append( + { + "thread_id": self.thread_id, + "run_id": self.run_id, + "event_type": event_type, + "category": category, + "content": content, + "metadata": metadata or {}, + 
"created_at": datetime.now(UTC).isoformat(), + } + ) + if len(self._buffer) >= self._flush_threshold: + self._flush_sync() + + def _flush_sync(self) -> None: + """Best-effort flush of buffer to RunEventStore. + + BaseCallbackHandler methods are synchronous. If an event loop is + running we schedule an async ``put_batch``; otherwise the events + stay in the buffer and are flushed later by the async ``flush()`` + call in the worker's ``finally`` block. + """ + if not self._buffer: + return + # Skip if a flush is already in flight — avoids concurrent writes + # to the same SQLite file from multiple fire-and-forget tasks. + if self._pending_flush_tasks: + return + try: + loop = asyncio.get_running_loop() + except RuntimeError: + # No event loop — keep events in buffer for later async flush. + return + batch = self._buffer.copy() + self._buffer.clear() + task = loop.create_task(self._flush_async(batch)) + self._pending_flush_tasks.add(task) + task.add_done_callback(self._on_flush_done) + + async def _flush_async(self, batch: list[dict]) -> None: + try: + await self._store.put_batch(batch) + except Exception: + logger.warning( + "Failed to flush %d events for run %s — returning to buffer", + len(batch), + self.run_id, + exc_info=True, + ) + # Return failed events to buffer for retry on next flush + self._buffer = batch + self._buffer + + def _on_flush_done(self, task: asyncio.Task) -> None: + self._pending_flush_tasks.discard(task) + if task.cancelled(): + return + exc = task.exception() + if exc: + logger.warning("Journal flush task failed: %s", exc) + + def _identify_caller(self, tags: list[str] | None) -> str: + _tags = tags or [] + for tag in _tags: + if isinstance(tag, str) and (tag.startswith("subagent:") or tag.startswith("middleware:") or tag == "lead_agent"): + return tag + # Default to lead_agent: the main agent graph does not inject + # callback tags, while subagents and middleware explicitly tag + # themselves. + return "lead_agent" + + # -- Public methods (called by worker) -- + + def set_first_human_message(self, content: str) -> None: + """Record the first human message for convenience fields.""" + self._first_human_msg = content[:2000] if content else None + + def record_middleware(self, tag: str, *, name: str, hook: str, action: str, changes: dict) -> None: + """Record a middleware state-change event. + + Called by middleware implementations when they perform a meaningful + state change (e.g., title generation, summarization, HITL approval). + Pure-observation middleware should not call this. + + Args: + tag: Short identifier for the middleware (e.g., "title", "summarize", + "guardrail"). Used to form event_type="middleware:{tag}". + name: Full middleware class name. + hook: Lifecycle hook that triggered the action (e.g., "after_model"). + action: Specific action performed (e.g., "generate_title"). + changes: Dict describing the state changes made. + """ + self._put( + event_type=f"middleware:{tag}", + category="middleware", + content={"name": name, "hook": hook, "action": action, "changes": changes}, + ) + + async def flush(self) -> None: + """Force flush remaining buffer. 
Called in worker's finally block.""" + if self._pending_flush_tasks: + await asyncio.gather(*tuple(self._pending_flush_tasks), return_exceptions=True) + + while self._buffer: + batch = self._buffer[: self._flush_threshold] + del self._buffer[: self._flush_threshold] + try: + await self._store.put_batch(batch) + except Exception: + self._buffer = batch + self._buffer + raise + + def get_completion_data(self) -> dict: + """Return accumulated token and message data for run completion.""" + return { + "total_input_tokens": self._total_input_tokens, + "total_output_tokens": self._total_output_tokens, + "total_tokens": self._total_tokens, + "llm_call_count": self._llm_call_count, + "message_count": self._msg_count, + "last_ai_message": self._last_ai_msg, + "first_human_message": self._first_human_msg, + } diff --git a/backend/packages/harness/deerflow/runtime/runs/__init__.py b/backend/packages/harness/deerflow/runtime/runs/__init__.py index afed90f48..9faa30c17 100644 --- a/backend/packages/harness/deerflow/runtime/runs/__init__.py +++ b/backend/packages/harness/deerflow/runtime/runs/__init__.py @@ -2,11 +2,12 @@ from .manager import ConflictError, RunManager, RunRecord, UnsupportedStrategyError from .schemas import DisconnectMode, RunStatus -from .worker import run_agent +from .worker import RunContext, run_agent __all__ = [ "ConflictError", "DisconnectMode", + "RunContext", "RunManager", "RunRecord", "RunStatus", diff --git a/backend/packages/harness/deerflow/runtime/runs/manager.py b/backend/packages/harness/deerflow/runtime/runs/manager.py index e61a1707f..a54a408b8 100644 --- a/backend/packages/harness/deerflow/runtime/runs/manager.py +++ b/backend/packages/harness/deerflow/runtime/runs/manager.py @@ -1,4 +1,4 @@ -"""In-memory run registry.""" +"""In-memory run registry with optional persistent RunStore backing.""" from __future__ import annotations @@ -7,9 +7,13 @@ import logging import uuid from dataclasses import dataclass, field from datetime import UTC, datetime +from typing import TYPE_CHECKING from .schemas import DisconnectMode, RunStatus +if TYPE_CHECKING: + from deerflow.runtime.runs.store.base import RunStore + logger = logging.getLogger(__name__) @@ -38,11 +42,43 @@ class RunRecord: class RunManager: - """In-memory run registry. All mutations are protected by an asyncio lock.""" + """In-memory run registry with optional persistent RunStore backing. - def __init__(self) -> None: + All mutations are protected by an asyncio lock. When a ``store`` is + provided, serializable metadata is also persisted to the store so + that run history survives process restarts. 
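+
+    A minimal construction sketch (the concrete store wiring is an assumption)::
+
+        manager = RunManager()                 # registry only, nothing persisted
+        manager = RunManager(store=run_store)  # runs also mirrored into run_store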
+    """
+
+    def __init__(self, store: RunStore | None = None) -> None:
         self._runs: dict[str, RunRecord] = {}
         self._lock = asyncio.Lock()
+        self._store = store
+
+    async def _persist_to_store(self, record: RunRecord) -> None:
+        """Best-effort persist run record to backing store."""
+        if self._store is None:
+            return
+        try:
+            await self._store.put(
+                record.run_id,
+                thread_id=record.thread_id,
+                assistant_id=record.assistant_id,
+                status=record.status.value,
+                multitask_strategy=record.multitask_strategy,
+                metadata=record.metadata or {},
+                kwargs=record.kwargs or {},
+                created_at=record.created_at,
+            )
+        except Exception:
+            logger.warning("Failed to persist run %s to store", record.run_id, exc_info=True)
+
+    async def update_run_completion(self, run_id: str, **kwargs) -> None:
+        """Persist token usage and completion data to the backing store."""
+        if self._store is not None:
+            try:
+                await self._store.update_run_completion(run_id, **kwargs)
+            except Exception:
+                logger.warning("Failed to persist run completion for %s", run_id, exc_info=True)

     async def create(
         self,
@@ -71,6 +107,7 @@ class RunManager:
         )
         async with self._lock:
             self._runs[run_id] = record
+            await self._persist_to_store(record)
         logger.info("Run created: run_id=%s thread_id=%s", run_id, thread_id)
         return record

@@ -96,6 +133,11 @@ class RunManager:
             record.updated_at = _now_iso()
             if error is not None:
                 record.error = error
+            if self._store is not None:
+                try:
+                    await self._store.update_status(run_id, status.value, error=error)
+                except Exception:
+                    logger.warning("Failed to persist status update for run %s", run_id, exc_info=True)
         logger.info("Run %s -> %s", run_id, status.value)

     async def cancel(self, run_id: str, *, action: str = "interrupt") -> bool:
@@ -185,6 +227,7 @@ class RunManager:
             )

             self._runs[run_id] = record
+            await self._persist_to_store(record)
         logger.info("Run created: run_id=%s thread_id=%s", run_id, thread_id)
         return record
diff --git a/backend/packages/harness/deerflow/runtime/runs/store/__init__.py b/backend/packages/harness/deerflow/runtime/runs/store/__init__.py
new file mode 100644
index 000000000..265a6fffb
--- /dev/null
+++ b/backend/packages/harness/deerflow/runtime/runs/store/__init__.py
@@ -0,0 +1,4 @@
+from deerflow.runtime.runs.store.base import RunStore
+from deerflow.runtime.runs.store.memory import MemoryRunStore
+
+__all__ = ["MemoryRunStore", "RunStore"]
diff --git a/backend/packages/harness/deerflow/runtime/runs/store/base.py b/backend/packages/harness/deerflow/runtime/runs/store/base.py
new file mode 100644
index 000000000..518a1903c
--- /dev/null
+++ b/backend/packages/harness/deerflow/runtime/runs/store/base.py
@@ -0,0 +1,95 @@
+"""Abstract interface for run metadata storage.
+
+RunManager depends on this interface. Implementations:
+- MemoryRunStore: in-memory dict (development, tests)
+- Future: RunRepository backed by SQLAlchemy ORM
+
+All methods accept an optional user_id for user isolation.
+When user_id is None, no user filtering is applied (single-user mode).
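+
+A hedged sketch of the isolation contract (identifiers are illustrative)::
+
+    await store.put("run-1", thread_id="t-1", user_id="u-1")
+    await store.list_by_thread("t-1", user_id="u-1")  # only u-1's runs
+    await store.list_by_thread("t-1")                 # no user filtering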
+""" + +from __future__ import annotations + +import abc +from typing import Any + + +class RunStore(abc.ABC): + @abc.abstractmethod + async def put( + self, + run_id: str, + *, + thread_id: str, + assistant_id: str | None = None, + user_id: str | None = None, + status: str = "pending", + multitask_strategy: str = "reject", + metadata: dict[str, Any] | None = None, + kwargs: dict[str, Any] | None = None, + error: str | None = None, + created_at: str | None = None, + ) -> None: + pass + + @abc.abstractmethod + async def get(self, run_id: str) -> dict[str, Any] | None: + pass + + @abc.abstractmethod + async def list_by_thread( + self, + thread_id: str, + *, + user_id: str | None = None, + limit: int = 100, + ) -> list[dict[str, Any]]: + pass + + @abc.abstractmethod + async def update_status( + self, + run_id: str, + status: str, + *, + error: str | None = None, + ) -> None: + pass + + @abc.abstractmethod + async def delete(self, run_id: str) -> None: + pass + + @abc.abstractmethod + async def update_run_completion( + self, + run_id: str, + *, + status: str, + total_input_tokens: int = 0, + total_output_tokens: int = 0, + total_tokens: int = 0, + llm_call_count: int = 0, + lead_agent_tokens: int = 0, + subagent_tokens: int = 0, + middleware_tokens: int = 0, + message_count: int = 0, + last_ai_message: str | None = None, + first_human_message: str | None = None, + error: str | None = None, + ) -> None: + pass + + @abc.abstractmethod + async def list_pending(self, *, before: str | None = None) -> list[dict[str, Any]]: + pass + + @abc.abstractmethod + async def aggregate_tokens_by_thread(self, thread_id: str) -> dict[str, Any]: + """Aggregate token usage for completed runs in a thread. + + Returns a dict with keys: total_tokens, total_input_tokens, + total_output_tokens, total_runs, by_model (model_name → {tokens, runs}), + by_caller ({lead_agent, subagent, middleware}). + """ + pass diff --git a/backend/packages/harness/deerflow/runtime/runs/store/memory.py b/backend/packages/harness/deerflow/runtime/runs/store/memory.py new file mode 100644 index 000000000..5a14af3df --- /dev/null +++ b/backend/packages/harness/deerflow/runtime/runs/store/memory.py @@ -0,0 +1,98 @@ +"""In-memory RunStore. Used when database.backend=memory (default) and in tests. + +Equivalent to the original RunManager._runs dict behavior. 
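+
+Aggregation sketch (completion fields arrive via ``update_run_completion``)::
+
+    store = MemoryRunStore()
+    await store.put("r1", thread_id="t1")
+    await store.update_run_completion("r1", status="success", total_tokens=42)
+    agg = await store.aggregate_tokens_by_thread("t1")
+    # agg["total_tokens"] == 42, agg["total_runs"] == 1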
+""" + +from __future__ import annotations + +from datetime import UTC, datetime +from typing import Any + +from deerflow.runtime.runs.store.base import RunStore + + +class MemoryRunStore(RunStore): + def __init__(self) -> None: + self._runs: dict[str, dict[str, Any]] = {} + + async def put( + self, + run_id, + *, + thread_id, + assistant_id=None, + user_id=None, + status="pending", + multitask_strategy="reject", + metadata=None, + kwargs=None, + error=None, + created_at=None, + ): + now = datetime.now(UTC).isoformat() + self._runs[run_id] = { + "run_id": run_id, + "thread_id": thread_id, + "assistant_id": assistant_id, + "user_id": user_id, + "status": status, + "multitask_strategy": multitask_strategy, + "metadata": metadata or {}, + "kwargs": kwargs or {}, + "error": error, + "created_at": created_at or now, + "updated_at": now, + } + + async def get(self, run_id): + return self._runs.get(run_id) + + async def list_by_thread(self, thread_id, *, user_id=None, limit=100): + results = [r for r in self._runs.values() if r["thread_id"] == thread_id and (user_id is None or r.get("user_id") == user_id)] + results.sort(key=lambda r: r["created_at"], reverse=True) + return results[:limit] + + async def update_status(self, run_id, status, *, error=None): + if run_id in self._runs: + self._runs[run_id]["status"] = status + if error is not None: + self._runs[run_id]["error"] = error + self._runs[run_id]["updated_at"] = datetime.now(UTC).isoformat() + + async def delete(self, run_id): + self._runs.pop(run_id, None) + + async def update_run_completion(self, run_id, *, status, **kwargs): + if run_id in self._runs: + self._runs[run_id]["status"] = status + for key, value in kwargs.items(): + if value is not None: + self._runs[run_id][key] = value + self._runs[run_id]["updated_at"] = datetime.now(UTC).isoformat() + + async def list_pending(self, *, before=None): + now = before or datetime.now(UTC).isoformat() + results = [r for r in self._runs.values() if r["status"] == "pending" and r["created_at"] <= now] + results.sort(key=lambda r: r["created_at"]) + return results + + async def aggregate_tokens_by_thread(self, thread_id: str) -> dict[str, Any]: + completed = [r for r in self._runs.values() if r["thread_id"] == thread_id and r.get("status") in ("success", "error")] + by_model: dict[str, dict] = {} + for r in completed: + model = r.get("model_name") or "unknown" + entry = by_model.setdefault(model, {"tokens": 0, "runs": 0}) + entry["tokens"] += r.get("total_tokens", 0) + entry["runs"] += 1 + return { + "total_tokens": sum(r.get("total_tokens", 0) for r in completed), + "total_input_tokens": sum(r.get("total_input_tokens", 0) for r in completed), + "total_output_tokens": sum(r.get("total_output_tokens", 0) for r in completed), + "total_runs": len(completed), + "by_model": by_model, + "by_caller": { + "lead_agent": sum(r.get("lead_agent_tokens", 0) for r in completed), + "subagent": sum(r.get("subagent_tokens", 0) for r in completed), + "middleware": sum(r.get("middleware_tokens", 0) for r in completed), + }, + } diff --git a/backend/packages/harness/deerflow/runtime/runs/worker.py b/backend/packages/harness/deerflow/runtime/runs/worker.py index c8b074f7a..1223c2127 100644 --- a/backend/packages/harness/deerflow/runtime/runs/worker.py +++ b/backend/packages/harness/deerflow/runtime/runs/worker.py @@ -19,8 +19,14 @@ import asyncio import copy import inspect import logging -from typing import Any, Literal +from dataclasses import dataclass, field +from functools import lru_cache +from typing import 
TYPE_CHECKING, Any, Literal
+if TYPE_CHECKING:
+    from langchain_core.messages import HumanMessage
+
+from deerflow.config.app_config import AppConfig
 from deerflow.runtime.serialization import serialize
 from deerflow.runtime.stream_bridge import StreamBridge

@@ -33,13 +39,67 @@
 logger = logging.getLogger(__name__)

 _VALID_LG_MODES = {"values", "updates", "checkpoints", "tasks", "debug", "messages", "custom"}


+def _build_runtime_context(thread_id: str, run_id: str, caller_context: Any | None) -> dict[str, Any]:
+    """Build the dict that becomes ``ToolRuntime.context`` for the run.
+
+    Always includes ``thread_id`` and ``run_id``. Additional keys from the caller's
+    ``config['context']`` (e.g. ``agent_name`` for the bootstrap flow — issue #2677)
+    are merged in but never override ``thread_id``/``run_id``.
+
+    langgraph 1.1+ surfaces this as ``runtime.context`` via the parent runtime stored
+    under ``config['configurable']['__pregel_runtime']`` — see
+    ``langgraph.pregel.main`` where ``parent_runtime.merge(...)`` is invoked.
+    """
+    runtime_ctx: dict[str, Any] = {"thread_id": thread_id, "run_id": run_id}
+    if isinstance(caller_context, dict):
+        for key, value in caller_context.items():
+            runtime_ctx.setdefault(key, value)
+    return runtime_ctx
+
+
+@dataclass(frozen=True)
+class RunContext:
+    """Infrastructure dependencies for a single agent run.
+
+    Groups checkpointer, store, and persistence-related singletons so that
+    ``run_agent`` (and any future callers) receive one object instead of a
+    growing list of keyword arguments.
+    """
+
+    checkpointer: Any
+    store: Any | None = field(default=None)
+    event_store: Any | None = field(default=None)
+    run_events_config: Any | None = field(default=None)
+    thread_store: Any | None = field(default=None)
+    app_config: AppConfig | None = field(default=None)
+
+
+def _compute_agent_factory_supports_app_config(agent_factory: Any) -> bool:
+    try:
+        return "app_config" in inspect.signature(agent_factory).parameters
+    except (TypeError, ValueError):
+        return False
+
+
+@lru_cache(maxsize=128)
+def _cached_agent_factory_supports_app_config(agent_factory: Any) -> bool:
+    return _compute_agent_factory_supports_app_config(agent_factory)
+
+
+def _agent_factory_supports_app_config(agent_factory: Any) -> bool:
+    try:
+        return _cached_agent_factory_supports_app_config(agent_factory)
+    except TypeError:
+        # Some callable instances are unhashable; fall back to a direct check.
+        return _compute_agent_factory_supports_app_config(agent_factory)
+
+
 async def run_agent(
     bridge: StreamBridge,
     run_manager: RunManager,
     record: RunRecord,
     *,
-    checkpointer: Any,
-    store: Any | None = None,
+    ctx: RunContext,
     agent_factory: Any,
     graph_input: dict,
     config: dict,
@@ -50,6 +110,13 @@
 ) -> None:
     """Execute an agent in the background, publishing events to *bridge*."""

+    # Unpack infrastructure dependencies from RunContext.
+    checkpointer = ctx.checkpointer
+    store = ctx.store
+    event_store = ctx.event_store
+    run_events_config = ctx.run_events_config
+    thread_store = ctx.thread_store
+
     run_id = record.run_id
     thread_id = record.thread_id
     requested_modes: set[str] = set(stream_modes or ["values"])
@@ -57,6 +124,8 @@
     pre_run_snapshot: dict[str, Any] | None = None
     snapshot_capture_failed = False

+    journal = None
+
     # Track whether "events" was requested but skipped
     if "events" in requested_modes:
         logger.info(
@@ -65,6 +134,22 @@
     )

     try:
+        # Initialize RunJournal + write human_message event.
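+        # (RunJournal buffers events and flushes them to the event store in
+        # batches; see its flush() in deerflow.runtime.journal.)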
+ # These are inside the try block so any exception (e.g. a DB + # error writing the event) flows through the except/finally + # path that publishes an "end" event to the SSE bridge — + # otherwise a failure here would leave the stream hanging + # with no terminator. + if event_store is not None: + from deerflow.runtime.journal import RunJournal + + journal = RunJournal( + run_id=run_id, + thread_id=thread_id, + event_store=event_store, + track_token_usage=getattr(run_events_config, "track_token_usage", True), + ) + # 1. Mark running await run_manager.set_status(run_id, RunStatus.running) @@ -100,18 +187,27 @@ async def run_agent( from langchain_core.runnables import RunnableConfig from langgraph.runtime import Runtime - # Inject runtime context so middlewares can access thread_id - # (langgraph-cli does this automatically; we must do it manually) - runtime = Runtime(context={"thread_id": thread_id}, store=store) - # If the caller already set a ``context`` key (LangGraph >= 0.6.0 - # prefers it over ``configurable`` for thread-level data), make - # sure ``thread_id`` is available there too. + # Inject runtime context so middlewares and tools (via ToolRuntime.context) can + # access thread-level data. langgraph-cli does this automatically; we must do it + # manually here because we drive the graph through ``agent.astream(config=...)`` + # without passing the official ``context=`` parameter. + runtime_ctx = _build_runtime_context(thread_id, run_id, config.get("context")) if "context" in config and isinstance(config["context"], dict): config["context"].setdefault("thread_id", thread_id) + config["context"].setdefault("run_id", run_id) + runtime = Runtime(context=runtime_ctx, store=store) config.setdefault("configurable", {})["__pregel_runtime"] = runtime + # Inject RunJournal as a LangChain callback handler. + # on_llm_end captures token usage; on_chain_start/end captures lifecycle. + if journal is not None: + config.setdefault("callbacks", []).append(journal) + runnable_config = RunnableConfig(**config) - agent = agent_factory(config=runnable_config) + if ctx.app_config is not None and _agent_factory_supports_app_config(agent_factory): + agent = agent_factory(config=runnable_config, app_config=ctx.app_config) + else: + agent = agent_factory(config=runnable_config) # 4. 
Attach checkpointer and store if checkpointer is not None: @@ -236,6 +332,41 @@ async def run_agent( ) finally: + # Flush any buffered journal events and persist completion data + if journal is not None: + try: + await journal.flush() + except Exception: + logger.warning("Failed to flush journal for run %s", run_id, exc_info=True) + + try: + # Persist token usage + convenience fields to RunStore + completion = journal.get_completion_data() + await run_manager.update_run_completion(run_id, status=record.status.value, **completion) + except Exception: + logger.warning("Failed to persist run completion for %s (non-fatal)", run_id, exc_info=True) + + # Sync title from checkpoint to threads_meta.display_name + if checkpointer is not None and thread_store is not None: + try: + ckpt_config = {"configurable": {"thread_id": thread_id, "checkpoint_ns": ""}} + ckpt_tuple = await checkpointer.aget_tuple(ckpt_config) + if ckpt_tuple is not None: + ckpt = getattr(ckpt_tuple, "checkpoint", {}) or {} + title = ckpt.get("channel_values", {}).get("title") + if title: + await thread_store.update_display_name(thread_id, title) + except Exception: + logger.debug("Failed to sync title for thread %s (non-fatal)", thread_id) + + # Update threads_meta status based on run outcome + if thread_store is not None: + try: + final_status = "idle" if record.status == RunStatus.success else record.status.value + await thread_store.update_status(thread_id, final_status) + except Exception: + logger.debug("Failed to update thread_meta status for %s (non-fatal)", thread_id) + await bridge.publish_end(run_id) asyncio.create_task(bridge.cleanup(run_id, delay=60)) @@ -355,6 +486,31 @@ def _lg_mode_to_sse_event(mode: str) -> str: return mode +def _extract_human_message(graph_input: dict) -> HumanMessage | None: + """Extract or construct a HumanMessage from graph_input for event recording. + + Returns a LangChain HumanMessage so callers can use .model_dump() to get + the checkpoint-aligned serialization format. 
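+
+    Accepted input shapes, sketched with hypothetical values::
+
+        _extract_human_message({"messages": []})                       # None
+        _extract_human_message({"messages": ["hello"]})                # HumanMessage("hello")
+        _extract_human_message({"messages": [{"content": "hello"}]})  # HumanMessage("hello")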
+ """ + from langchain_core.messages import HumanMessage + + messages = graph_input.get("messages") + if not messages: + return None + last = messages[-1] if isinstance(messages, list) else messages + if isinstance(last, HumanMessage): + return last + if isinstance(last, str): + return HumanMessage(content=last) if last else None + if hasattr(last, "content"): + content = last.content + return HumanMessage(content=content) + if isinstance(last, dict): + content = last.get("content", "") + return HumanMessage(content=content) if content else None + return None + + def _unpack_stream_item( item: Any, lg_modes: list[str], diff --git a/backend/packages/harness/deerflow/runtime/store/async_provider.py b/backend/packages/harness/deerflow/runtime/store/async_provider.py index bc7a60559..bc1f07eba 100644 --- a/backend/packages/harness/deerflow/runtime/store/async_provider.py +++ b/backend/packages/harness/deerflow/runtime/store/async_provider.py @@ -23,7 +23,7 @@ from collections.abc import AsyncIterator from langgraph.store.base import BaseStore -from deerflow.config.app_config import get_app_config +from deerflow.config.app_config import AppConfig, get_app_config from deerflow.runtime.store.provider import POSTGRES_CONN_REQUIRED, POSTGRES_STORE_INSTALL, SQLITE_STORE_INSTALL, ensure_sqlite_parent_dir, resolve_sqlite_conn_str logger = logging.getLogger(__name__) @@ -86,28 +86,29 @@ async def _async_store(config) -> AsyncIterator[BaseStore]: @contextlib.asynccontextmanager -async def make_store() -> AsyncIterator[BaseStore]: +async def make_store(app_config: AppConfig | None = None) -> AsyncIterator[BaseStore]: """Async context manager that yields a Store whose backend matches the configured checkpointer. Reads from the same ``checkpointer`` section of *config.yaml* used by - :func:`deerflow.agents.checkpointer.async_provider.make_checkpointer` so + :func:`deerflow.runtime.checkpointer.async_provider.make_checkpointer` so that both singletons always use the same persistence technology:: - async with make_store() as store: + async with make_store(app_config) as store: app.state.store = store Yields an :class:`~langgraph.store.memory.InMemoryStore` when no ``checkpointer`` section is configured (emits a WARNING in that case). """ - config = get_app_config() + if app_config is None: + app_config = get_app_config() - if config.checkpointer is None: + if app_config.checkpointer is None: from langgraph.store.memory import InMemoryStore logger.warning("No 'checkpointer' section in config.yaml — using InMemoryStore for the store. Thread list will be lost on server restart. Configure a sqlite or postgres backend for persistence.") yield InMemoryStore() return - async with _async_store(config.checkpointer) as store: + async with _async_store(app_config.checkpointer) as store: yield store diff --git a/backend/packages/harness/deerflow/runtime/stream_bridge/async_provider.py b/backend/packages/harness/deerflow/runtime/stream_bridge/async_provider.py index 891f79fa0..929e29977 100644 --- a/backend/packages/harness/deerflow/runtime/stream_bridge/async_provider.py +++ b/backend/packages/harness/deerflow/runtime/stream_bridge/async_provider.py @@ -1,7 +1,7 @@ """Async stream bridge factory. Provides an **async context manager** aligned with -:func:`deerflow.agents.checkpointer.async_provider.make_checkpointer`. +:func:`deerflow.runtime.checkpointer.async_provider.make_checkpointer`. Usage (e.g. 
FastAPI lifespan):: @@ -17,6 +17,7 @@ import contextlib import logging from collections.abc import AsyncIterator +from deerflow.config.app_config import AppConfig from deerflow.config.stream_bridge_config import get_stream_bridge_config from .base import StreamBridge @@ -25,14 +26,16 @@ logger = logging.getLogger(__name__) @contextlib.asynccontextmanager -async def make_stream_bridge(config=None) -> AsyncIterator[StreamBridge]: +async def make_stream_bridge(app_config: AppConfig | None = None) -> AsyncIterator[StreamBridge]: """Async context manager that yields a :class:`StreamBridge`. Falls back to :class:`MemoryStreamBridge` when no configuration is provided and nothing is set globally. """ - if config is None: + if app_config is None: config = get_stream_bridge_config() + else: + config = app_config.stream_bridge if config is None or config.type == "memory": from deerflow.runtime.stream_bridge.memory import MemoryStreamBridge diff --git a/backend/packages/harness/deerflow/runtime/user_context.py b/backend/packages/harness/deerflow/runtime/user_context.py new file mode 100644 index 000000000..ffe4be690 --- /dev/null +++ b/backend/packages/harness/deerflow/runtime/user_context.py @@ -0,0 +1,167 @@ +"""Request-scoped user context for user-based authorization. + +This module holds a :class:`~contextvars.ContextVar` that the gateway's +auth middleware sets after a successful authentication. Repository +methods read the contextvar via a sentinel default parameter, letting +routers stay free of ``user_id`` boilerplate. + +Three-state semantics for the repository ``user_id`` parameter (the +consumer side of this module lives in ``deerflow.persistence.*``): + +- ``_AUTO`` (module-private sentinel, default): read from contextvar; + raise :class:`RuntimeError` if unset. +- Explicit ``str``: use the provided value, overriding contextvar. +- Explicit ``None``: no WHERE clause — used only by migration scripts + and admin CLIs that intentionally bypass isolation. + +Dependency direction +-------------------- +``persistence`` (lower layer) reads from this module; ``gateway.auth`` +(higher layer) writes to it. ``CurrentUser`` is defined here as a +:class:`typing.Protocol` so that ``persistence`` never needs to import +the concrete ``User`` class from ``gateway.auth.models``. Any object +with an ``.id: str`` attribute structurally satisfies the protocol. + +Asyncio semantics +----------------- +``ContextVar`` is task-local under asyncio, not thread-local. Each +FastAPI request runs in its own task, so the context is naturally +isolated. ``asyncio.create_task`` and ``asyncio.to_thread`` inherit the +parent task's context, which is typically the intended behaviour; if +a background task must *not* see the foreground user, wrap it with +``contextvars.copy_context()`` to get a clean copy. +""" + +from __future__ import annotations + +from contextvars import ContextVar, Token +from typing import Final, Protocol, runtime_checkable + + +@runtime_checkable +class CurrentUser(Protocol): + """Structural type for the current authenticated user. + + Any object with an ``.id: str`` attribute satisfies this protocol. + Concrete implementations live in ``app.gateway.auth.models.User``. + """ + + id: str + + +_current_user: Final[ContextVar[CurrentUser | None]] = ContextVar("deerflow_current_user", default=None) + + +def set_current_user(user: CurrentUser) -> Token[CurrentUser | None]: + """Set the current user for this async task. 
+
+    Returns a reset token that should be passed to
+    :func:`reset_current_user` in a ``finally`` block to restore the
+    previous context.
+    """
+    return _current_user.set(user)
+
+
+def reset_current_user(token: Token[CurrentUser | None]) -> None:
+    """Restore the context to the state captured by ``token``."""
+    _current_user.reset(token)
+
+
+def get_current_user() -> CurrentUser | None:
+    """Return the current user, or ``None`` if unset.
+
+    Safe to call in any context. Used by code paths that can proceed
+    without a user (e.g. migration scripts, public endpoints).
+    """
+    return _current_user.get()
+
+
+def require_current_user() -> CurrentUser:
+    """Return the current user, or raise :class:`RuntimeError`.
+
+    Used by repository code that must not be called outside a
+    request-authenticated context. The error message is phrased so
+    that a caller debugging a stack trace can locate the offending
+    code path.
+    """
+    user = _current_user.get()
+    if user is None:
+        raise RuntimeError("repository accessed without user context")
+    return user
+
+
+# ---------------------------------------------------------------------------
+# Effective user_id helpers (filesystem isolation)
+# ---------------------------------------------------------------------------
+
+DEFAULT_USER_ID: Final[str] = "default"
+
+
+def get_effective_user_id() -> str:
+    """Return the current user's id as a string, or DEFAULT_USER_ID if unset.
+
+    Unlike :func:`require_current_user` this never raises — it is designed
+    for filesystem-path resolution where a valid user bucket is always needed.
+    """
+    user = _current_user.get()
+    if user is None:
+        return DEFAULT_USER_ID
+    return str(user.id)
+
+
+# ---------------------------------------------------------------------------
+# Sentinel-based user_id resolution
+# ---------------------------------------------------------------------------
+#
+# Repository methods accept a ``user_id`` keyword-only argument that
+# defaults to ``AUTO``. The three possible values drive distinct
+# behaviours; see the docstring on :func:`resolve_user_id`.
+
+
+class _AutoSentinel:
+    """Singleton marker meaning 'resolve user_id from contextvar'."""
+
+    _instance: _AutoSentinel | None = None
+
+    def __new__(cls) -> _AutoSentinel:
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+        return cls._instance
+
+    def __repr__(self) -> str:
+        return "<AUTO>"
+
+
+AUTO: Final[_AutoSentinel] = _AutoSentinel()
+
+
+def resolve_user_id(
+    value: str | None | _AutoSentinel,
+    *,
+    method_name: str = "repository method",
+) -> str | None:
+    """Resolve the user_id parameter passed to a repository method.
+
+    Three-state semantics:
+
+    - :data:`AUTO` (default): read from contextvar; raise
+      :class:`RuntimeError` if no user is in context. This is the
+      common case for request-scoped calls.
+    - Explicit ``str``: use the provided id verbatim, overriding any
+      contextvar value. Useful for tests and admin-override flows.
+    - Explicit ``None``: no filter — the repository should skip the
+      user_id WHERE clause entirely. Reserved for migration scripts
+      and CLI tools that intentionally bypass isolation.
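+
+    Sketch (``user`` is any object with an ``.id`` attribute)::
+
+        token = set_current_user(user)
+        try:
+            resolve_user_id(AUTO)       # -> str(user.id)
+            resolve_user_id("admin-1")  # -> "admin-1"
+            resolve_user_id(None)       # -> None (skip user filtering)
+        finally:
+            reset_current_user(token)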
+ """ + if isinstance(value, _AutoSentinel): + user = _current_user.get() + if user is None: + raise RuntimeError(f"{method_name} called with user_id=AUTO but no user context is set; pass an explicit user_id, set the contextvar via auth middleware, or opt out with user_id=None for migration/CLI paths.") + # Coerce to ``str`` at the boundary: ``User.id`` is typed as + # ``UUID`` for the API surface, but the persistence layer + # stores ``user_id`` as ``String(64)`` and aiosqlite cannot + # bind a raw UUID object to a VARCHAR column ("type 'UUID' is + # not supported"). Honour the documented return type here + # rather than ripple a type change through every caller. + return str(user.id) + return value diff --git a/backend/packages/harness/deerflow/sandbox/local/list_dir.py b/backend/packages/harness/deerflow/sandbox/local/list_dir.py index b1031d340..35e51f848 100644 --- a/backend/packages/harness/deerflow/sandbox/local/list_dir.py +++ b/backend/packages/harness/deerflow/sandbox/local/list_dir.py @@ -22,6 +22,13 @@ def list_dir(path: str, max_depth: int = 2) -> list[str]: if not root_path.is_dir(): return result + def _is_within_root(candidate: Path) -> bool: + try: + candidate.relative_to(root_path) + return True + except ValueError: + return False + def _traverse(current_path: Path, current_depth: int) -> None: """Recursively traverse directories up to max_depth.""" if current_depth > max_depth: @@ -32,8 +39,23 @@ def list_dir(path: str, max_depth: int = 2) -> list[str]: if should_ignore_name(item.name): continue + if item.is_symlink(): + try: + item_resolved = item.resolve() + if not _is_within_root(item_resolved): + continue + except OSError: + continue + post_fix = "/" if item_resolved.is_dir() else "" + result.append(str(item_resolved) + post_fix) + continue + + item_resolved = item.resolve() + if not _is_within_root(item_resolved): + continue + post_fix = "/" if item.is_dir() else "" - result.append(str(item.resolve()) + post_fix) + result.append(str(item_resolved) + post_fix) # Recurse into subdirectories if not at max depth if item.is_dir() and current_depth < max_depth: diff --git a/backend/packages/harness/deerflow/sandbox/local/local_sandbox.py b/backend/packages/harness/deerflow/sandbox/local/local_sandbox.py index 2da0a678f..116a62159 100644 --- a/backend/packages/harness/deerflow/sandbox/local/local_sandbox.py +++ b/backend/packages/harness/deerflow/sandbox/local/local_sandbox.py @@ -5,6 +5,7 @@ import shutil import subprocess from dataclasses import dataclass from pathlib import Path +from typing import NamedTuple from deerflow.sandbox.local.list_dir import list_dir from deerflow.sandbox.sandbox import Sandbox @@ -20,6 +21,11 @@ class PathMapping: read_only: bool = False +class ResolvedPath(NamedTuple): + path: str + mapping: PathMapping | None + + class LocalSandbox(Sandbox): @staticmethod def _shell_name(shell: str) -> str: @@ -91,7 +97,23 @@ class LocalSandbox(Sandbox): return best_mapping.read_only - def _resolve_path(self, path: str) -> str: + def _find_path_mapping(self, path: str) -> tuple[PathMapping, str] | None: + path_str = str(path) + + for mapping in sorted(self.path_mappings, key=lambda m: len(m.container_path.rstrip("/") or "/"), reverse=True): + container_path = mapping.container_path.rstrip("/") or "/" + if container_path == "/": + if path_str.startswith("/"): + return mapping, path_str.lstrip("/") + continue + + if path_str == container_path or path_str.startswith(container_path + "/"): + relative = path_str[len(container_path) :].lstrip("/") + return 
mapping, relative + + return None + + def _resolve_path_with_mapping(self, path: str) -> ResolvedPath: """ Resolve container path to actual local path using mappings. @@ -99,22 +121,30 @@ class LocalSandbox(Sandbox): path: Path that might be a container path Returns: - Resolved local path + Resolved local path and the matched mapping, if any """ path_str = str(path) - # Try each mapping (longest prefix first for more specific matches) - for mapping in sorted(self.path_mappings, key=lambda m: len(m.container_path), reverse=True): - container_path = mapping.container_path - local_path = mapping.local_path - if path_str == container_path or path_str.startswith(container_path + "/"): - # Replace the container path prefix with local path - relative = path_str[len(container_path) :].lstrip("/") - resolved = str(Path(local_path) / relative) if relative else local_path - return resolved + mapping_match = self._find_path_mapping(path_str) + if mapping_match is None: + return ResolvedPath(path_str, None) - # No mapping found, return original path - return path_str + mapping, relative = mapping_match + local_root = Path(mapping.local_path).resolve() + resolved_path = (local_root / relative).resolve() if relative else local_root + + try: + resolved_path.relative_to(local_root) + except ValueError as exc: + raise PermissionError(errno.EACCES, "Access denied: path escapes mounted directory", path_str) from exc + + return ResolvedPath(str(resolved_path), mapping) + + def _resolve_path(self, path: str) -> str: + return self._resolve_path_with_mapping(path).path + + def _is_resolved_path_read_only(self, resolved: ResolvedPath) -> bool: + return bool(resolved.mapping and resolved.mapping.read_only) or self._is_read_only_path(resolved.path) def _reverse_resolve_path(self, path: str) -> str: """ @@ -288,10 +318,10 @@ class LocalSandbox(Sandbox): timeout=600, ) else: + args = [shell, "-c", resolved_command] result = subprocess.run( - resolved_command, - executable=shell, - shell=True, + args, + shell=False, capture_output=True, text=True, timeout=600, @@ -309,8 +339,14 @@ class LocalSandbox(Sandbox): def list_dir(self, path: str, max_depth=2) -> list[str]: resolved_path = self._resolve_path(path) entries = list_dir(resolved_path, max_depth) - # Reverse resolve local paths back to container paths in output - return [self._reverse_resolve_paths_in_output(entry) for entry in entries] + # Reverse resolve local paths back to container paths and preserve + # list_dir's trailing "/" marker for directories. 
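+        # (e.g. a local "/home/me/sandbox/docs/" entry is reported as its
+        # "/mnt/user-data/docs/" container form; the paths are hypothetical.)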
+        result: list[str] = []
+        for entry in entries:
+            is_dir = entry.endswith(("/", "\\"))
+            reversed_entry = self._reverse_resolve_path(entry.rstrip("/\\") if is_dir else entry)
+            result.append(f"{reversed_entry}/" if is_dir and not reversed_entry.endswith("/") else reversed_entry)
+        return result

     def read_file(self, path: str) -> str:
         resolved_path = self._resolve_path(path)
@@ -329,8 +365,9 @@
             raise type(e)(e.errno, e.strerror, path) from None

     def write_file(self, path: str, content: str, append: bool = False) -> None:
-        resolved_path = self._resolve_path(path)
-        if self._is_read_only_path(resolved_path):
+        resolved = self._resolve_path_with_mapping(path)
+        resolved_path = resolved.path
+        if self._is_resolved_path_read_only(resolved):
             raise OSError(errno.EROFS, "Read-only file system", path)
         try:
             dir_path = os.path.dirname(resolved_path)
@@ -384,8 +421,9 @@
         ], truncated

     def update_file(self, path: str, content: bytes) -> None:
-        resolved_path = self._resolve_path(path)
-        if self._is_read_only_path(resolved_path):
+        resolved = self._resolve_path_with_mapping(path)
+        resolved_path = resolved.path
+        if self._is_resolved_path_read_only(resolved):
             raise OSError(errno.EROFS, "Read-only file system", path)
         try:
             dir_path = os.path.dirname(resolved_path)
diff --git a/backend/packages/harness/deerflow/sandbox/tools.py b/backend/packages/harness/deerflow/sandbox/tools.py
index 7b09358e7..32ee7d646 100644
--- a/backend/packages/harness/deerflow/sandbox/tools.py
+++ b/backend/packages/harness/deerflow/sandbox/tools.py
@@ -22,6 +22,9 @@
 _ABSOLUTE_PATH_PATTERN = re.compile(r"(?<![\w@.-])(/[^\s\"'`;&|<>()]+)")
 _FILE_URL_PATTERN = re.compile(r"\bfile://\S+", re.IGNORECASE)
+_URL_WITH_SCHEME_PATTERN = re.compile(r"^[a-z][a-z0-9+.-]*://", re.IGNORECASE)
+_URL_IN_COMMAND_PATTERN = re.compile(r"\b[a-z][a-z0-9+.-]*://[^\s\"'`;&|<>()]+", re.IGNORECASE)
+_DOTDOT_PATH_SEGMENT_PATTERN = re.compile(r"(?:^|[/\\=])\.\.(?:$|[/\\])")
 _LOCAL_BASH_SYSTEM_PATH_PREFIXES = (
     "/bin/",
     "/usr/bin/",
@@ -37,6 +40,42 @@
 _DEFAULT_GLOB_MAX_RESULTS = 200
 _MAX_GLOB_MAX_RESULTS = 1000
 _DEFAULT_GREP_MAX_RESULTS = 100
 _MAX_GREP_MAX_RESULTS = 500
+_LOCAL_BASH_CWD_COMMANDS = {"cd", "pushd"}
+_LOCAL_BASH_COMMAND_WRAPPERS = {"command", "builtin"}
+_LOCAL_BASH_COMMAND_PREFIX_KEYWORDS = {"!", "{", "case", "do", "elif", "else", "for", "if", "select", "then", "time", "until", "while"}
+_LOCAL_BASH_COMMAND_END_KEYWORDS = {"}", "done", "esac", "fi"}
+_LOCAL_BASH_ROOT_PATH_COMMANDS = {
+    "awk",
+    "cat",
+    "cp",
+    "du",
+    "find",
+    "grep",
+    "head",
+    "less",
+    "ln",
+    "ls",
+    "more",
+    "mv",
+    "rm",
+    "sed",
+    "tail",
+    "tar",
+}
+_SHELL_COMMAND_SEPARATORS = {";", "&&", "||", "|", "|&", "&", "(", ")"}
+_SHELL_REDIRECTION_OPERATORS = {
+    "<",
+    ">",
+    "<<",
+    ">>",
+    "<<<",
+    "<>",
+    ">&",
+    "<&",
+    "&>",
+    "&>>",
+    ">|",
+}


 def _get_skills_container_path() -> str:
@@ -200,8 +239,9 @@
     if thread_id is not None:
         try:
             from deerflow.config.paths import get_paths
+            from deerflow.runtime.user_context import get_effective_user_id

-            host_path = get_paths().acp_workspace_dir(thread_id)
+            host_path = get_paths().acp_workspace_dir(thread_id, user_id=get_effective_user_id())
             if host_path.exists():
                 return str(host_path)
         except Exception:
@@ -548,7 +588,7 @@ def validate_local_tool_path(path: str, thread_data: ThreadDataState | None, *,
     This
function is a security gate — it checks whether *path* may be accessed and raises on violation. It does **not** resolve the virtual path to a host path; callers are responsible for resolution via - ``_resolve_and_validate_user_data_path`` or ``_resolve_skills_path``. + ``resolve_and_validate_user_data_path`` or ``_resolve_skills_path``. Allowed virtual-path families: - ``/mnt/user-data/*`` — always allowed (read + write) @@ -635,6 +675,219 @@ def _resolve_and_validate_user_data_path(path: str, thread_data: ThreadDataState return str(resolved) +def _is_non_file_url_token(token: str) -> bool: + """Return True for URL tokens that should not be interpreted as paths.""" + values = [token] + if "=" in token: + values.append(token.split("=", 1)[1]) + + for value in values: + match = _URL_WITH_SCHEME_PATTERN.match(value) + if match and not value.lower().startswith("file://"): + return True + return False + + +def _non_file_url_spans(command: str) -> list[tuple[int, int]]: + spans = [] + for match in _URL_IN_COMMAND_PATTERN.finditer(command): + if not match.group().lower().startswith("file://"): + spans.append(match.span()) + return spans + + +def _is_in_spans(position: int, spans: list[tuple[int, int]]) -> bool: + return any(start <= position < end for start, end in spans) + + +def _has_dotdot_path_segment(token: str) -> bool: + if _is_non_file_url_token(token): + return False + return bool(_DOTDOT_PATH_SEGMENT_PATTERN.search(token)) + + +def _split_shell_tokens(command: str) -> list[str]: + try: + normalized = command.replace("\r\n", "\n").replace("\r", "\n").replace("\n", " ; ") + lexer = shlex.shlex(normalized, posix=True, punctuation_chars=True) + lexer.whitespace_split = True + lexer.commenters = "" + return list(lexer) + except ValueError: + # The shell will reject malformed quoting later; keep validation + # best-effort instead of turning syntax errors into security messages. 
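+        # A plain whitespace split still lets the per-token path checks in the
+        # caller see each argument, just without quote handling.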
+ return command.split() + + +def _is_shell_command_separator(token: str) -> bool: + return token in _SHELL_COMMAND_SEPARATORS + + +def _is_shell_redirection_operator(token: str) -> bool: + return token in _SHELL_REDIRECTION_OPERATORS + + +def _is_shell_assignment(token: str) -> bool: + name, separator, _ = token.partition("=") + if not separator or not name: + return False + return bool(re.fullmatch(r"[A-Za-z_][A-Za-z0-9_]*", name)) + + +def _is_allowed_local_bash_absolute_path(path: str, allowed_paths: list[str], *, allow_system_paths: bool) -> bool: + # Check for MCP filesystem server allowed paths + if any(path.startswith(allowed_path) or path == allowed_path.rstrip("/") for allowed_path in allowed_paths): + _reject_path_traversal(path) + return True + + if path == VIRTUAL_PATH_PREFIX or path.startswith(f"{VIRTUAL_PATH_PREFIX}/"): + _reject_path_traversal(path) + return True + + # Allow skills container path (resolved by tools.py before passing to sandbox) + if _is_skills_path(path): + _reject_path_traversal(path) + return True + + # Allow ACP workspace path (path-traversal check only) + if _is_acp_workspace_path(path): + _reject_path_traversal(path) + return True + + # Allow custom mount container paths + if _is_custom_mount_path(path): + _reject_path_traversal(path) + return True + + if allow_system_paths and any(path == prefix.rstrip("/") or path.startswith(prefix) for prefix in _LOCAL_BASH_SYSTEM_PATH_PREFIXES): + return True + + return False + + +def _next_cd_target(tokens: list[str], start_index: int) -> tuple[str | None, int]: + index = start_index + while index < len(tokens): + token = tokens[index] + if _is_shell_command_separator(token): + return None, index + if _is_shell_redirection_operator(token): + index += 2 + continue + if token == "--": + index += 1 + continue + if token in {"-L", "-P", "-e", "-@"}: + index += 1 + continue + if token.startswith("-") and token != "-": + index += 1 + continue + return token, index + 1 + return None, index + + +def _validate_local_bash_cwd_target(command_name: str, target: str | None, allowed_paths: list[str]) -> None: + if target is None or target == "-": + raise PermissionError(f"Unsafe working directory change in command: {command_name}. Use paths under {VIRTUAL_PATH_PREFIX}") + if target.startswith(("$", "`")): + raise PermissionError(f"Unsafe working directory change in command: {command_name} {target}. Use paths under {VIRTUAL_PATH_PREFIX}") + if target.startswith("~"): + raise PermissionError(f"Unsafe working directory change in command: {command_name} {target}. Use paths under {VIRTUAL_PATH_PREFIX}") + if target.startswith("/"): + _reject_path_traversal(target) + if not _is_allowed_local_bash_absolute_path(target, allowed_paths, allow_system_paths=False): + raise PermissionError(f"Unsafe working directory change in command: {command_name} {target}. 
Use paths under {VIRTUAL_PATH_PREFIX}") + + +def _looks_like_unsafe_cwd_target(target: str | None) -> bool: + if target is None: + return False + return target == "-" or target.startswith(("$", "`", "~", "/", "..")) or _has_dotdot_path_segment(target) + + +def _validate_local_bash_root_path_args(command_name: str, tokens: list[str], start_index: int) -> None: + if command_name not in _LOCAL_BASH_ROOT_PATH_COMMANDS: + return + + index = start_index + while index < len(tokens): + token = tokens[index] + if _is_shell_command_separator(token): + return + if _is_shell_redirection_operator(token): + index += 2 + continue + if token == "/" and not _is_non_file_url_token(token): + raise PermissionError(f"Unsafe absolute paths in command: /. Use paths under {VIRTUAL_PATH_PREFIX}") + index += 1 + + +def _validate_local_bash_shell_tokens(command: str, allowed_paths: list[str]) -> None: + """Conservatively reject relative path escapes missed by absolute-path scanning.""" + if re.search(r"\$\([^)]*\b(?:cd|pushd)\b", command): + raise PermissionError(f"Unsafe working directory change in command substitution. Use paths under {VIRTUAL_PATH_PREFIX}") + + tokens = _split_shell_tokens(command) + + for token in tokens: + if _is_shell_command_separator(token) or _is_shell_redirection_operator(token): + continue + if _has_dotdot_path_segment(token): + raise PermissionError("Access denied: path traversal detected") + + at_command_start = True + index = 0 + while index < len(tokens): + token = tokens[index] + + if _is_shell_command_separator(token): + at_command_start = True + index += 1 + continue + + if _is_shell_redirection_operator(token): + index += 1 + continue + + if at_command_start and _is_shell_assignment(token): + index += 1 + continue + + command_name = token.rsplit("/", 1)[-1] + if at_command_start and command_name in _LOCAL_BASH_COMMAND_PREFIX_KEYWORDS | _LOCAL_BASH_COMMAND_END_KEYWORDS: + index += 1 + continue + + if not at_command_start: + index += 1 + continue + + at_command_start = False + if command_name in _LOCAL_BASH_COMMAND_WRAPPERS and index + 1 < len(tokens): + wrapped_name = tokens[index + 1].rsplit("/", 1)[-1] + if wrapped_name in _LOCAL_BASH_CWD_COMMANDS: + target, next_index = _next_cd_target(tokens, index + 2) + _validate_local_bash_cwd_target(wrapped_name, target, allowed_paths) + index = next_index + continue + _validate_local_bash_root_path_args(wrapped_name, tokens, index + 2) + + if command_name not in _LOCAL_BASH_CWD_COMMANDS: + _validate_local_bash_root_path_args(command_name, tokens, index + 1) + index += 1 + continue + + target, next_index = _next_cd_target(tokens, index + 1) + _validate_local_bash_cwd_target(command_name, target, allowed_paths) + index = next_index + + +def resolve_and_validate_user_data_path(path: str, thread_data: ThreadDataState) -> str: + """Resolve a /mnt/user-data virtual path and validate it stays in bounds.""" + return _resolve_and_validate_user_data_path(path, thread_data) + + def validate_local_bash_command_paths(command: str, thread_data: ThreadDataState | None) -> None: """Validate absolute paths in local-sandbox bash commands. 
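+
+    Sketch of the intended decisions (paths are illustrative and the
+    ``thread_data`` gating is not shown)::
+
+        validate_local_bash_command_paths("ls /mnt/user-data/outputs", None)  # allowed
+        validate_local_bash_command_paths("cat /usr/bin/env", None)           # allowed (system prefix)
+        validate_local_bash_command_paths("cd /etc", None)                    # PermissionError
+        validate_local_bash_command_paths("cat /mnt/user-data/../x", None)    # PermissionError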
@@ -660,33 +913,14 @@ def validate_local_bash_command_paths(command: str, thread_data: ThreadDataState unsafe_paths: list[str] = [] allowed_paths = _get_mcp_allowed_paths() + _validate_local_bash_shell_tokens(command, allowed_paths) + url_spans = _non_file_url_spans(command) - for absolute_path in _ABSOLUTE_PATH_PATTERN.findall(command): - # Check for MCP filesystem server allowed paths - if any(absolute_path.startswith(path) or absolute_path == path.rstrip("/") for path in allowed_paths): - _reject_path_traversal(absolute_path) + for match in _ABSOLUTE_PATH_PATTERN.finditer(command): + if _is_in_spans(match.start(), url_spans): continue - - if absolute_path == VIRTUAL_PATH_PREFIX or absolute_path.startswith(f"{VIRTUAL_PATH_PREFIX}/"): - _reject_path_traversal(absolute_path) - continue - - # Allow skills container path (resolved by tools.py before passing to sandbox) - if _is_skills_path(absolute_path): - _reject_path_traversal(absolute_path) - continue - - # Allow ACP workspace path (path-traversal check only) - if _is_acp_workspace_path(absolute_path): - _reject_path_traversal(absolute_path) - continue - - # Allow custom mount container paths - if _is_custom_mount_path(absolute_path): - _reject_path_traversal(absolute_path) - continue - - if any(absolute_path == prefix.rstrip("/") or absolute_path.startswith(prefix) for prefix in _LOCAL_BASH_SYSTEM_PATH_PREFIXES): + absolute_path = match.group() + if _is_allowed_local_bash_absolute_path(absolute_path, allowed_paths, allow_system_paths=True): continue unsafe_paths.append(absolute_path) diff --git a/backend/packages/harness/deerflow/skills/__init__.py b/backend/packages/harness/deerflow/skills/__init__.py index bbdca0650..77910d5c2 100644 --- a/backend/packages/harness/deerflow/skills/__init__.py +++ b/backend/packages/harness/deerflow/skills/__init__.py @@ -1,14 +1,17 @@ -from .installer import SkillAlreadyExistsError, install_skill_from_archive -from .loader import get_skills_root_path, load_skills +from __future__ import annotations + +from .installer import SkillAlreadyExistsError, SkillSecurityScanError +from .storage import LocalSkillStorage, SkillStorage, get_or_new_skill_storage from .types import Skill from .validation import ALLOWED_FRONTMATTER_PROPERTIES, _validate_skill_frontmatter __all__ = [ - "load_skills", - "get_skills_root_path", "Skill", "ALLOWED_FRONTMATTER_PROPERTIES", "_validate_skill_frontmatter", - "install_skill_from_archive", "SkillAlreadyExistsError", + "SkillSecurityScanError", + "SkillStorage", + "LocalSkillStorage", + "get_or_new_skill_storage", ] diff --git a/backend/packages/harness/deerflow/skills/installer.py b/backend/packages/harness/deerflow/skills/installer.py index f7234336e..536ba0f88 100644 --- a/backend/packages/harness/deerflow/skills/installer.py +++ b/backend/packages/harness/deerflow/skills/installer.py @@ -4,24 +4,31 @@ Pure business logic — no FastAPI/HTTP dependencies. Both Gateway and Client delegate to these functions. 
""" +import asyncio +import concurrent.futures import logging import posixpath import shutil import stat -import tempfile import zipfile from pathlib import Path, PurePosixPath, PureWindowsPath -from deerflow.skills.loader import get_skills_root_path -from deerflow.skills.validation import _validate_skill_frontmatter +from deerflow.skills.security_scanner import scan_skill_content logger = logging.getLogger(__name__) +_PROMPT_INPUT_DIRS = {"references", "templates"} +_PROMPT_INPUT_SUFFIXES = frozenset({".json", ".markdown", ".md", ".rst", ".txt", ".yaml", ".yml"}) + class SkillAlreadyExistsError(ValueError): """Raised when a skill with the same name is already installed.""" +class SkillSecurityScanError(ValueError): + """Raised when a skill archive fails security scanning.""" + + def is_unsafe_zip_member(info: zipfile.ZipInfo) -> bool: """Return True if the zip member path is absolute or attempts directory traversal.""" name = info.filename @@ -114,70 +121,84 @@ def safe_extract_skill_archive( dst.write(chunk) -def install_skill_from_archive( - zip_path: str | Path, - *, - skills_root: Path | None = None, -) -> dict: - """Install a skill from a .skill archive (ZIP). +def _is_script_support_file(rel_path: Path) -> bool: + return bool(rel_path.parts) and rel_path.parts[0] == "scripts" - Args: - zip_path: Path to the .skill file. - skills_root: Override the skills root directory. If None, uses - the default from config. - Returns: - Dict with success, skill_name, message. +def _should_scan_support_file(rel_path: Path) -> bool: + if _is_script_support_file(rel_path): + return True + return bool(rel_path.parts) and rel_path.parts[0] in _PROMPT_INPUT_DIRS and rel_path.suffix.lower() in _PROMPT_INPUT_SUFFIXES - Raises: - FileNotFoundError: If the file does not exist. - ValueError: If the file is invalid (wrong extension, bad ZIP, - invalid frontmatter, duplicate name). 
- """ - logger.info("Installing skill from %s", zip_path) - path = Path(zip_path) - if not path.is_file(): - if not path.exists(): - raise FileNotFoundError(f"Skill file not found: {zip_path}") - raise ValueError(f"Path is not a file: {zip_path}") - if path.suffix != ".skill": - raise ValueError("File must have .skill extension") - if skills_root is None: - skills_root = get_skills_root_path() - custom_dir = skills_root / "custom" - custom_dir.mkdir(parents=True, exist_ok=True) +def _move_staged_skill_into_reserved_target(staging_target: Path, target: Path) -> None: + installed = False + reserved = False + try: + target.mkdir(mode=0o700) + reserved = True + for child in staging_target.iterdir(): + shutil.move(str(child), target / child.name) + installed = True + except FileExistsError as e: + raise SkillAlreadyExistsError(f"Skill '{target.name}' already exists") from e + finally: + if reserved and not installed and target.exists(): + shutil.rmtree(target) - with tempfile.TemporaryDirectory() as tmp: - tmp_path = Path(tmp) - try: - zf = zipfile.ZipFile(path, "r") - except FileNotFoundError: - raise FileNotFoundError(f"Skill file not found: {zip_path}") from None - except (zipfile.BadZipFile, IsADirectoryError): - raise ValueError("File is not a valid ZIP archive") from None +async def _scan_skill_file_or_raise(skill_dir: Path, path: Path, skill_name: str, *, executable: bool) -> None: + rel_path = path.relative_to(skill_dir).as_posix() + location = f"{skill_name}/{rel_path}" + try: + content = path.read_text(encoding="utf-8") + except UnicodeDecodeError as e: + raise SkillSecurityScanError(f"Security scan failed for skill '{skill_name}': {location} must be valid UTF-8") from e - with zf: - safe_extract_skill_archive(zf, tmp_path) + try: + result = await scan_skill_content(content, executable=executable, location=location) + except Exception as e: + raise SkillSecurityScanError(f"Security scan failed for {location}: {e}") from e - skill_dir = resolve_skill_dir_from_archive(tmp_path) + decision = getattr(result, "decision", None) + reason = str(getattr(result, "reason", "") or "No reason provided.") + if decision == "block": + if rel_path == "SKILL.md": + raise SkillSecurityScanError(f"Security scan blocked skill '{skill_name}': {reason}") + raise SkillSecurityScanError(f"Security scan blocked {location}: {reason}") + if executable and decision != "allow": + raise SkillSecurityScanError(f"Security scan rejected executable {location}: {reason}") + if decision not in {"allow", "warn"}: + raise SkillSecurityScanError(f"Security scan failed for {location}: invalid scanner decision {decision!r}") - is_valid, message, skill_name = _validate_skill_frontmatter(skill_dir) - if not is_valid: - raise ValueError(f"Invalid skill: {message}") - if not skill_name or "/" in skill_name or "\\" in skill_name or ".." 
in skill_name: - raise ValueError(f"Invalid skill name: {skill_name}") - target = custom_dir / skill_name - if target.exists(): - raise SkillAlreadyExistsError(f"Skill '{skill_name}' already exists") +async def _scan_skill_archive_contents_or_raise(skill_dir: Path, skill_name: str) -> None: + """Run the skill security scanner against all installable text and script files.""" + skill_md = skill_dir / "SKILL.md" + await _scan_skill_file_or_raise(skill_dir, skill_md, skill_name, executable=False) - shutil.copytree(skill_dir, target) - logger.info("Skill %r installed to %s", skill_name, target) + for path in sorted(skill_dir.rglob("*")): + if not path.is_file(): + continue - return { - "success": True, - "skill_name": skill_name, - "message": f"Skill '{skill_name}' installed successfully", - } + rel_path = path.relative_to(skill_dir) + if rel_path == Path("SKILL.md"): + continue + if path.name == "SKILL.md": + raise SkillSecurityScanError(f"Security scan failed for skill '{skill_name}': nested SKILL.md is not allowed at {skill_name}/{rel_path.as_posix()}") + if not _should_scan_support_file(rel_path): + continue + + await _scan_skill_file_or_raise(skill_dir, path, skill_name, executable=_is_script_support_file(rel_path)) + + +def _run_async_install(coro): + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = None + + if loop is not None and loop.is_running(): + with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: + return executor.submit(asyncio.run, coro).result() + return asyncio.run(coro) diff --git a/backend/packages/harness/deerflow/skills/loader.py b/backend/packages/harness/deerflow/skills/loader.py deleted file mode 100644 index 35ffda661..000000000 --- a/backend/packages/harness/deerflow/skills/loader.py +++ /dev/null @@ -1,103 +0,0 @@ -import logging -import os -from pathlib import Path - -from .parser import parse_skill_file -from .types import Skill - -logger = logging.getLogger(__name__) - - -def get_skills_root_path() -> Path: - """ - Get the root path of the skills directory. - - Returns: - Path to the skills directory (deer-flow/skills) - """ - # loader.py lives at packages/harness/deerflow/skills/loader.py — 5 parents up reaches backend/ - backend_dir = Path(__file__).resolve().parent.parent.parent.parent.parent - # skills directory is sibling to backend directory - skills_dir = backend_dir.parent / "skills" - return skills_dir - - -def load_skills(skills_path: Path | None = None, use_config: bool = True, enabled_only: bool = False) -> list[Skill]: - """ - Load all skills from the skills directory. - - Scans both public and custom skill directories, parsing SKILL.md files - to extract metadata. The enabled state is determined by the skills_state_config.json file. - - Args: - skills_path: Optional custom path to skills directory. - If not provided and use_config is True, uses path from config. 
- Otherwise defaults to deer-flow/skills - use_config: Whether to load skills path from config (default: True) - enabled_only: If True, only return enabled skills (default: False) - - Returns: - List of Skill objects, sorted by name - """ - if skills_path is None: - if use_config: - try: - from deerflow.config import get_app_config - - config = get_app_config() - skills_path = config.skills.get_skills_path() - except Exception: - # Fallback to default if config fails - skills_path = get_skills_root_path() - else: - skills_path = get_skills_root_path() - - if not skills_path.exists(): - return [] - - skills_by_name: dict[str, Skill] = {} - - # Scan public and custom directories - for category in ["public", "custom"]: - category_path = skills_path / category - if not category_path.exists() or not category_path.is_dir(): - continue - - for current_root, dir_names, file_names in os.walk(category_path, followlinks=True): - # Keep traversal deterministic and skip hidden directories. - dir_names[:] = sorted(name for name in dir_names if not name.startswith(".")) - if "SKILL.md" not in file_names: - continue - - skill_file = Path(current_root) / "SKILL.md" - relative_path = skill_file.parent.relative_to(category_path) - - skill = parse_skill_file(skill_file, category=category, relative_path=relative_path) - if skill: - skills_by_name[skill.name] = skill - - skills = list(skills_by_name.values()) - - # Load skills state configuration and update enabled status - # NOTE: We use ExtensionsConfig.from_file() instead of get_extensions_config() - # to always read the latest configuration from disk. This ensures that changes - # made through the Gateway API (which runs in a separate process) are immediately - # reflected in the LangGraph Server when loading skills. 
- try: - from deerflow.config.extensions_config import ExtensionsConfig - - extensions_config = ExtensionsConfig.from_file() - for skill in skills: - skill.enabled = extensions_config.is_skill_enabled(skill.name, skill.category) - except Exception as e: - # If config loading fails, default to all enabled - logger.warning("Failed to load extensions config: %s", e) - - # Filter by enabled status if requested - if enabled_only: - skills = [skill for skill in skills if skill.enabled] - - # Sort by name for consistent ordering - skills.sort(key=lambda s: s.name) - - return skills diff --git a/backend/packages/harness/deerflow/skills/manager.py b/backend/packages/harness/deerflow/skills/manager.py deleted file mode 100644 index 77789937a..000000000 --- a/backend/packages/harness/deerflow/skills/manager.py +++ /dev/null @@ -1,159 +0,0 @@ -"""Utilities for managing custom skills and their history.""" - -from __future__ import annotations - -import json -import re -import tempfile -from datetime import UTC, datetime -from pathlib import Path -from typing import Any - -from deerflow.config import get_app_config -from deerflow.skills.loader import load_skills -from deerflow.skills.validation import _validate_skill_frontmatter - -SKILL_FILE_NAME = "SKILL.md" -HISTORY_FILE_NAME = "HISTORY.jsonl" -HISTORY_DIR_NAME = ".history" -ALLOWED_SUPPORT_SUBDIRS = {"references", "templates", "scripts", "assets"} -_SKILL_NAME_PATTERN = re.compile(r"^[a-z0-9]+(?:-[a-z0-9]+)*$") - - -def get_skills_root_dir() -> Path: - return get_app_config().skills.get_skills_path() - - -def get_public_skills_dir() -> Path: - return get_skills_root_dir() / "public" - - -def get_custom_skills_dir() -> Path: - path = get_skills_root_dir() / "custom" - path.mkdir(parents=True, exist_ok=True) - return path - - -def validate_skill_name(name: str) -> str: - normalized = name.strip() - if not _SKILL_NAME_PATTERN.fullmatch(normalized): - raise ValueError("Skill name must be hyphen-case using lowercase letters, digits, and hyphens only.") - if len(normalized) > 64: - raise ValueError("Skill name must be 64 characters or fewer.") - return normalized - - -def get_custom_skill_dir(name: str) -> Path: - return get_custom_skills_dir() / validate_skill_name(name) - - -def get_custom_skill_file(name: str) -> Path: - return get_custom_skill_dir(name) / SKILL_FILE_NAME - - -def get_custom_skill_history_dir() -> Path: - path = get_custom_skills_dir() / HISTORY_DIR_NAME - path.mkdir(parents=True, exist_ok=True) - return path - - -def get_skill_history_file(name: str) -> Path: - return get_custom_skill_history_dir() / f"{validate_skill_name(name)}.jsonl" - - -def get_public_skill_dir(name: str) -> Path: - return get_public_skills_dir() / validate_skill_name(name) - - -def custom_skill_exists(name: str) -> bool: - return get_custom_skill_file(name).exists() - - -def public_skill_exists(name: str) -> bool: - return (get_public_skill_dir(name) / SKILL_FILE_NAME).exists() - - -def ensure_custom_skill_is_editable(name: str) -> None: - if custom_skill_exists(name): - return - if public_skill_exists(name): - raise ValueError(f"'{name}' is a built-in skill. 
To customise it, create a new skill with the same name under skills/custom/.") - raise FileNotFoundError(f"Custom skill '{name}' not found.") - - -def ensure_safe_support_path(name: str, relative_path: str) -> Path: - skill_dir = get_custom_skill_dir(name).resolve() - if not relative_path or relative_path.endswith("/"): - raise ValueError("Supporting file path must include a filename.") - relative = Path(relative_path) - if relative.is_absolute(): - raise ValueError("Supporting file path must be relative.") - if any(part in {"..", ""} for part in relative.parts): - raise ValueError("Supporting file path must not contain parent-directory traversal.") - - top_level = relative.parts[0] if relative.parts else "" - if top_level not in ALLOWED_SUPPORT_SUBDIRS: - raise ValueError(f"Supporting files must live under one of: {', '.join(sorted(ALLOWED_SUPPORT_SUBDIRS))}.") - - target = (skill_dir / relative).resolve() - allowed_root = (skill_dir / top_level).resolve() - try: - target.relative_to(allowed_root) - except ValueError as exc: - raise ValueError("Supporting file path must stay within the selected support directory.") from exc - return target - - -def validate_skill_markdown_content(name: str, content: str) -> None: - with tempfile.TemporaryDirectory() as tmp_dir: - temp_skill_dir = Path(tmp_dir) / validate_skill_name(name) - temp_skill_dir.mkdir(parents=True, exist_ok=True) - (temp_skill_dir / SKILL_FILE_NAME).write_text(content, encoding="utf-8") - is_valid, message, parsed_name = _validate_skill_frontmatter(temp_skill_dir) - if not is_valid: - raise ValueError(message) - if parsed_name != name: - raise ValueError(f"Frontmatter name '{parsed_name}' must match requested skill name '{name}'.") - - -def atomic_write(path: Path, content: str) -> None: - path.parent.mkdir(parents=True, exist_ok=True) - with tempfile.NamedTemporaryFile("w", encoding="utf-8", delete=False, dir=str(path.parent)) as tmp_file: - tmp_file.write(content) - tmp_path = Path(tmp_file.name) - tmp_path.replace(path) - - -def append_history(name: str, record: dict[str, Any]) -> None: - history_path = get_skill_history_file(name) - history_path.parent.mkdir(parents=True, exist_ok=True) - payload = { - "ts": datetime.now(UTC).isoformat(), - **record, - } - with history_path.open("a", encoding="utf-8") as f: - f.write(json.dumps(payload, ensure_ascii=False)) - f.write("\n") - - -def read_history(name: str) -> list[dict[str, Any]]: - history_path = get_skill_history_file(name) - if not history_path.exists(): - return [] - records: list[dict[str, Any]] = [] - for line in history_path.read_text(encoding="utf-8").splitlines(): - if not line.strip(): - continue - records.append(json.loads(line)) - return records - - -def list_custom_skills() -> list: - return [skill for skill in load_skills(enabled_only=False) if skill.category == "custom"] - - -def read_custom_skill_content(name: str) -> str: - skill_file = get_custom_skill_file(name) - if not skill_file.exists(): - raise FileNotFoundError(f"Custom skill '{name}' not found.") - return skill_file.read_text(encoding="utf-8") diff --git a/backend/packages/harness/deerflow/skills/parser.py b/backend/packages/harness/deerflow/skills/parser.py index 63bcfef7c..b5f56488a 100644 --- a/backend/packages/harness/deerflow/skills/parser.py +++ b/backend/packages/harness/deerflow/skills/parser.py @@ -4,24 +4,24 @@ from pathlib import Path import yaml -from .types import Skill +from .types import SKILL_MD_FILE, Skill, SkillCategory logger = logging.getLogger(__name__) -def 
parse_skill_file(skill_file: Path, category: str, relative_path: Path | None = None) -> Skill | None: +def parse_skill_file(skill_file: Path, category: SkillCategory, relative_path: Path | None = None) -> Skill | None: """Parse a SKILL.md file and extract metadata. Args: skill_file: Path to the SKILL.md file. - category: Category of the skill ('public' or 'custom'). + category: Category of the skill. relative_path: Relative path from the category root to the skill directory. Defaults to the skill directory name when omitted. Returns: Skill object if parsing succeeds, None otherwise. """ - if not skill_file.exists() or skill_file.name != "SKILL.md": + if not skill_file.exists() or skill_file.name != SKILL_MD_FILE: return None try: diff --git a/backend/packages/harness/deerflow/skills/security_scanner.py b/backend/packages/harness/deerflow/skills/security_scanner.py index 51986cc71..3bddb018f 100644 --- a/backend/packages/harness/deerflow/skills/security_scanner.py +++ b/backend/packages/harness/deerflow/skills/security_scanner.py @@ -8,7 +8,9 @@ import re from dataclasses import dataclass from deerflow.config import get_app_config +from deerflow.config.app_config import AppConfig from deerflow.models import create_chat_model +from deerflow.skills.types import SKILL_MD_FILE logger = logging.getLogger(__name__) @@ -35,7 +37,7 @@ def _extract_json_object(raw: str) -> dict | None: return None -async def scan_skill_content(content: str, *, executable: bool = False, location: str = "SKILL.md") -> ScanResult: +async def scan_skill_content(content: str, *, executable: bool = False, location: str = SKILL_MD_FILE, app_config: AppConfig | None = None) -> ScanResult: """Screen skill content before it is written to disk.""" rubric = ( "You are a security reviewer for AI agent skills. " @@ -47,14 +49,15 @@ async def scan_skill_content(content: str, *, executable: bool = False, location prompt = f"Location: {location}\nExecutable: {str(executable).lower()}\n\nReview this content:\n-----\n{content}\n-----" try: - config = get_app_config() + config = app_config or get_app_config() model_name = config.skill_evolution.moderation_model_name - model = create_chat_model(name=model_name, thinking_enabled=False) if model_name else create_chat_model(thinking_enabled=False) + model = create_chat_model(name=model_name, thinking_enabled=False, app_config=config) if model_name else create_chat_model(thinking_enabled=False, app_config=config) response = await model.ainvoke( [ {"role": "system", "content": rubric}, {"role": "user", "content": prompt}, - ] + ], + config={"run_name": "security_agent"}, ) parsed = _extract_json_object(str(getattr(response, "content", "") or "")) if parsed and parsed.get("decision") in {"allow", "warn", "block"}: diff --git a/backend/packages/harness/deerflow/skills/storage/__init__.py b/backend/packages/harness/deerflow/skills/storage/__init__.py new file mode 100644 index 000000000..044d7adae --- /dev/null +++ b/backend/packages/harness/deerflow/skills/storage/__init__.py @@ -0,0 +1,83 @@ +"""SkillStorage singleton + reflection-based factory. + +Mirrors the pattern used by ``deerflow/sandbox/sandbox_provider.py``. 
+""" + +from __future__ import annotations + +from deerflow.skills.storage.local_skill_storage import LocalSkillStorage +from deerflow.skills.storage.skill_storage import SkillStorage + +_default_skill_storage: SkillStorage | None = None +_default_skill_storage_config: object | None = None # AppConfig identity the singleton was built from + + +def get_or_new_skill_storage(**kwargs) -> SkillStorage: + """Return a ``SkillStorage`` instance — either a new one or the process singleton. + + **New instance** is created (never cached) when: + - ``skills_path`` is provided — uses it as the ``host_path`` override (class still resolved via config). + - ``app_config`` is provided — constructs a storage from ``app_config.skills`` + so that per-request config (e.g. Gateway ``Depends(get_config)``) is respected + without polluting the process-level singleton. + + **Singleton** is returned (created on first call, then reused) when neither + ``skills_path`` nor ``app_config`` is given — uses ``get_app_config()`` to + resolve the active configuration. + """ + global _default_skill_storage, _default_skill_storage_config + + from deerflow.config import get_app_config + from deerflow.config.skills_config import SkillsConfig + + def _make_storage(skills_config: SkillsConfig, *, host_path: str | None = None, **kwargs) -> SkillStorage: + from deerflow.reflection import resolve_class + + cls = resolve_class(skills_config.use, SkillStorage) + return cls( + host_path=host_path if host_path is not None else str(skills_config.get_skills_path()), + container_path=skills_config.container_path, + **kwargs, + ) + + skills_path = kwargs.pop("skills_path", None) + app_config = kwargs.pop("app_config", None) + + if skills_path is not None: + if app_config is not None: + return _make_storage(app_config.skills, host_path=str(skills_path), **kwargs) + # No app_config: use a default SkillsConfig so we never need to read config.yaml + # when the caller has already supplied an explicit host path. + from deerflow.config.skills_config import SkillsConfig + + return _make_storage(SkillsConfig(), host_path=str(skills_path), **kwargs) + + if app_config is not None: + return _make_storage(app_config.skills, **kwargs) + + # If the singleton was manually injected (e.g. in tests) without a config + # identity (_default_skill_storage_config is None), skip get_app_config() + # entirely to avoid requiring a config.yaml on disk. 
+ if _default_skill_storage is not None and _default_skill_storage_config is None: + return _default_skill_storage + + app_config_now = get_app_config() + if _default_skill_storage is None or _default_skill_storage_config is not app_config_now: + _default_skill_storage = _make_storage(app_config_now.skills, **kwargs) + _default_skill_storage_config = app_config_now + return _default_skill_storage + + +def reset_skill_storage() -> None: + """Clear the cached singleton (used in tests and hot-reload scenarios).""" + global _default_skill_storage, _default_skill_storage_config + _default_skill_storage = None + _default_skill_storage_config = None + + +__all__ = [ + "LocalSkillStorage", + "SkillStorage", + "get_or_new_skill_storage", + "reset_skill_storage", +] diff --git a/backend/packages/harness/deerflow/skills/storage/local_skill_storage.py b/backend/packages/harness/deerflow/skills/storage/local_skill_storage.py new file mode 100644 index 000000000..047cd6163 --- /dev/null +++ b/backend/packages/harness/deerflow/skills/storage/local_skill_storage.py @@ -0,0 +1,198 @@ +"""Local-filesystem implementation of ``SkillStorage``.""" + +from __future__ import annotations + +import errno +import json +import logging +import os +import shutil +import tempfile +from collections.abc import Iterable +from datetime import UTC, datetime +from pathlib import Path + +from deerflow.config.skills_config import _default_repo_root +from deerflow.skills.storage.skill_storage import SKILL_MD_FILE, SkillStorage +from deerflow.skills.types import SkillCategory + +logger = logging.getLogger(__name__) + +DEFAULT_SKILLS_CONTAINER_PATH = "/mnt/skills" + + +class LocalSkillStorage(SkillStorage): + """Skill storage backed by the local filesystem. + + Layout:: + + /public//SKILL.md + /custom//SKILL.md + /custom/.history/.jsonl + """ + + def __init__( + self, + host_path: str | None = None, + container_path: str = DEFAULT_SKILLS_CONTAINER_PATH, + app_config=None, + ) -> None: + super().__init__(container_path=container_path) + if host_path is None: + from deerflow.config import get_app_config + + config = app_config or get_app_config() + self._host_root: Path = config.skills.get_skills_path() + else: + path = Path(host_path) + if not path.is_absolute(): + path = _default_repo_root() / path + self._host_root = path.resolve() + + # ------------------------------------------------------------------ + # Abstract operation implementations + # ------------------------------------------------------------------ + + def get_skills_root_path(self) -> Path: + return self._host_root + + def custom_skill_exists(self, name: str) -> bool: + return self.get_custom_skill_file(name).exists() + + def public_skill_exists(self, name: str) -> bool: + normalized_name = self.validate_skill_name(name) + return (self._host_root / SkillCategory.PUBLIC.value / normalized_name / SKILL_MD_FILE).exists() + + def _iter_skill_files(self) -> Iterable[tuple[SkillCategory, Path, Path]]: + if not self._host_root.exists(): + return + for category in SkillCategory: + category_path = self._host_root / category.value + if not category_path.exists() or not category_path.is_dir(): + continue + for current_root, dir_names, file_names in os.walk(category_path, followlinks=True): + dir_names[:] = sorted(name for name in dir_names if not name.startswith(".")) + if SKILL_MD_FILE not in file_names: + continue + yield category, category_path, Path(current_root) / SKILL_MD_FILE + + def read_custom_skill(self, name: str) -> str: + if not self.custom_skill_exists(name): + 
raise FileNotFoundError(f"Custom skill '{name}' not found.") + return (self.get_custom_skill_dir(name) / SKILL_MD_FILE).read_text(encoding="utf-8") + + def write_custom_skill(self, name: str, relative_path: str, content: str) -> None: + target = self.validate_relative_path(relative_path, self.get_custom_skill_dir(name)) + target.parent.mkdir(parents=True, exist_ok=True) + with tempfile.NamedTemporaryFile( + "w", + encoding="utf-8", + delete=False, + dir=str(target.parent), + ) as tmp_file: + tmp_file.write(content) + tmp_path = Path(tmp_file.name) + tmp_path.replace(target) + + async def ainstall_skill_from_archive(self, archive_path: str | Path) -> dict: + import zipfile + + from deerflow.skills.installer import ( + SkillAlreadyExistsError, + _move_staged_skill_into_reserved_target, + _scan_skill_archive_contents_or_raise, + resolve_skill_dir_from_archive, + safe_extract_skill_archive, + ) + from deerflow.skills.validation import _validate_skill_frontmatter + + logger.info("Installing skill from %s", archive_path) + path = Path(archive_path) + if not path.is_file(): + if not path.exists(): + raise FileNotFoundError(f"Skill file not found: {archive_path}") + raise ValueError(f"Path is not a file: {archive_path}") + if path.suffix != ".skill": + raise ValueError("File must have .skill extension") + + custom_dir = self._host_root / "custom" + custom_dir.mkdir(parents=True, exist_ok=True) + + with tempfile.TemporaryDirectory() as tmp: + tmp_path = Path(tmp) + + try: + zf = zipfile.ZipFile(path, "r") + except FileNotFoundError: + raise FileNotFoundError(f"Skill file not found: {archive_path}") from None + except (zipfile.BadZipFile, IsADirectoryError): + raise ValueError("File is not a valid ZIP archive") from None + + with zf: + safe_extract_skill_archive(zf, tmp_path) + + skill_dir = resolve_skill_dir_from_archive(tmp_path) + + is_valid, message, skill_name = _validate_skill_frontmatter(skill_dir) + if not is_valid: + raise ValueError(f"Invalid skill: {message}") + if not skill_name or "/" in skill_name or "\\" in skill_name or ".." 
in skill_name: + raise ValueError(f"Invalid skill name: {skill_name}") + + target = custom_dir / skill_name + if target.exists(): + raise SkillAlreadyExistsError(f"Skill '{skill_name}' already exists") + + await _scan_skill_archive_contents_or_raise(skill_dir, skill_name) + + with tempfile.TemporaryDirectory(prefix=f".installing-{skill_name}-", dir=custom_dir) as staging_root: + staging_target = Path(staging_root) / skill_name + shutil.copytree(skill_dir, staging_target) + _move_staged_skill_into_reserved_target(staging_target, target) + logger.info("Skill %r installed to %s", skill_name, target) + + return { + "success": True, + "skill_name": skill_name, + "message": f"Skill '{skill_name}' installed successfully", + } + + def delete_custom_skill(self, name: str, *, history_meta: dict | None = None) -> None: + self.validate_skill_name(name) + self.ensure_custom_skill_is_editable(name) + target = self.get_custom_skill_dir(name) + if history_meta is not None: + prev_content = self.read_custom_skill(name) + try: + self.append_history(name, {**history_meta, "prev_content": prev_content}) + except OSError as e: + if not isinstance(e, PermissionError) and e.errno not in {errno.EACCES, errno.EPERM, errno.EROFS}: + raise + logger.warning( + "Skipping delete history write for custom skill %s due to readonly/permission failure; continuing with skill directory removal: %s", + name, + e, + ) + if target.exists(): + shutil.rmtree(target) + + def append_history(self, name: str, record: dict) -> None: + self.validate_skill_name(name) + payload = {"ts": datetime.now(UTC).isoformat(), **record} + history_path = self.get_skill_history_file(name) + history_path.parent.mkdir(parents=True, exist_ok=True) + with history_path.open("a", encoding="utf-8") as f: + f.write(json.dumps(payload, ensure_ascii=False)) + f.write("\n") + + def read_history(self, name: str) -> list[dict]: + self.validate_skill_name(name) + history_path = self.get_skill_history_file(name) + if not history_path.exists(): + return [] + records: list[dict] = [] + for line in history_path.read_text(encoding="utf-8").splitlines(): + if not line.strip(): + continue + records.append(json.loads(line)) + return records diff --git a/backend/packages/harness/deerflow/skills/storage/skill_storage.py b/backend/packages/harness/deerflow/skills/storage/skill_storage.py new file mode 100644 index 000000000..15af90102 --- /dev/null +++ b/backend/packages/harness/deerflow/skills/storage/skill_storage.py @@ -0,0 +1,254 @@ +"""Abstract SkillStorage base class with template-method flows.""" + +from __future__ import annotations + +import logging +import re +from abc import ABC, abstractmethod +from collections.abc import Iterable +from pathlib import Path + +from deerflow.skills.types import SKILL_MD_FILE, Skill, SkillCategory # noqa: F401 + +logger = logging.getLogger(__name__) + +_SKILL_NAME_PATTERN = re.compile(r"^[a-z0-9]+(?:-[a-z0-9]+)*$") + + +class SkillStorage(ABC): + """Abstract base for skill storage backends. + + Subclasses implement a small set of storage-medium-specific atomic + operations; this base class provides final template-method flows + (load_skills, history serialisation, path helpers, validation) that + compose them with protocol-level helpers. 
+ """ + + def __init__(self, container_path: str = "/mnt/skills") -> None: + self._container_root = container_path + + # ------------------------------------------------------------------ + # Static protocol helpers (not storage-specific) + # ------------------------------------------------------------------ + + @staticmethod + def validate_skill_name(name: str) -> str: + """Validate and normalise a skill name; return the normalised form.""" + normalized = name.strip() + if not _SKILL_NAME_PATTERN.fullmatch(normalized): + raise ValueError("Skill name must be hyphen-case using lowercase letters, digits, and hyphens only.") + if len(normalized) > 64: + raise ValueError("Skill name must be 64 characters or fewer.") + return normalized + + @staticmethod + def validate_relative_path(relative_path: str, base_dir: Path) -> Path: + """Validate *relative_path* against *base_dir* and return the resolved target. + + Checks that *relative_path* is non-empty, then joins it with *base_dir* + and resolves the result (following symlinks). Raises ``ValueError`` if + the resolved target does not lie within *base_dir*. + """ + if not relative_path: + raise ValueError("relative_path must not be empty.") + resolved_base = base_dir.resolve() + target = (resolved_base / relative_path).resolve() + try: + target.relative_to(resolved_base) + except ValueError as exc: + raise ValueError("relative_path must resolve within the skill directory.") from exc + return target + + @staticmethod + def validate_skill_markdown_content(name: str, content: str) -> None: + """Validate SKILL.md content: parse frontmatter and check name matches.""" + import tempfile + + from deerflow.skills.validation import _validate_skill_frontmatter + + with tempfile.TemporaryDirectory() as tmp_dir: + temp_skill_dir = Path(tmp_dir) / SkillStorage.validate_skill_name(name) + temp_skill_dir.mkdir(parents=True, exist_ok=True) + (temp_skill_dir / SKILL_MD_FILE).write_text(content, encoding="utf-8") + is_valid, message, parsed_name = _validate_skill_frontmatter(temp_skill_dir) + if not is_valid: + raise ValueError(message) + if parsed_name != name: + raise ValueError(f"Frontmatter name '{parsed_name}' must match requested skill name '{name}'.") + + def ensure_safe_support_path(self, name: str, relative_path: str) -> Path: + """Validate and return the resolved absolute path for a support file.""" + _ALLOWED_SUPPORT_SUBDIRS = {"references", "templates", "scripts", "assets"} + skill_dir = self.get_custom_skill_dir(self.validate_skill_name(name)).resolve() + if not relative_path or relative_path.endswith("/"): + raise ValueError("Supporting file path must include a filename.") + relative = Path(relative_path) + if relative.is_absolute(): + raise ValueError("Supporting file path must be relative.") + if any(part in {"..", ""} for part in relative.parts): + raise ValueError("Supporting file path must not contain parent-directory traversal.") + top_level = relative.parts[0] if relative.parts else "" + if top_level not in _ALLOWED_SUPPORT_SUBDIRS: + raise ValueError(f"Supporting files must live under one of: {', '.join(sorted(_ALLOWED_SUPPORT_SUBDIRS))}.") + target = (skill_dir / relative).resolve() + allowed_root = (skill_dir / top_level).resolve() + try: + target.relative_to(allowed_root) + except ValueError as exc: + raise ValueError("Supporting file path must stay within the selected support directory.") from exc + return target + + # ------------------------------------------------------------------ + # Abstract atomic operations (storage-medium 
specific) + # ------------------------------------------------------------------ + + @abstractmethod + def get_skills_root_path(self) -> Path: + """Absolute host path to the skills root, used for sandbox mounts. + + Origin: ``deerflow.skills.loader.get_skills_root_path``. + """ + + @abstractmethod + def _iter_skill_files(self) -> Iterable[tuple[SkillCategory, Path, Path]]: + """Yield ``(category, category_root, skill_md_path)`` for every SKILL.md. + + Origin: extracted from directory-walk logic inside + ``deerflow.skills.loader.load_skills``. + """ + + @abstractmethod + def read_custom_skill(self, name: str) -> str: + """Read SKILL.md content for a custom skill. + + Origin: ``deerflow.skills.manager.read_custom_skill_content``. + """ + + @abstractmethod + def write_custom_skill(self, name: str, relative_path: str, content: str) -> None: + """Atomically write a text file under ``custom//``. + + Origin: ``deerflow.skills.manager.atomic_write``. + """ + + @abstractmethod + async def ainstall_skill_from_archive(self, archive_path: str | Path) -> dict: + """Async install of a skill from a ``.skill`` ZIP archive. + + Origin: ``deerflow.skills.installer.ainstall_skill_from_archive``. + """ + + def install_skill_from_archive(self, archive_path: str | Path) -> dict: + """Sync wrapper — delegates to :meth:`ainstall_skill_from_archive`.""" + from deerflow.skills.installer import _run_async_install + + return _run_async_install(self.ainstall_skill_from_archive(archive_path)) + + @abstractmethod + def delete_custom_skill(self, name: str, *, history_meta: dict | None = None) -> None: + """Delete a custom skill (validation + optional history + directory removal). + + Origin: ``app.gateway.routers.skills.delete_custom_skill`` + ``skill_manage_tool``. + """ + + @abstractmethod + def custom_skill_exists(self, name: str) -> bool: + """Origin: ``deerflow.skills.manager.custom_skill_exists``.""" + + @abstractmethod + def public_skill_exists(self, name: str) -> bool: + """Origin: ``deerflow.skills.manager.public_skill_exists``.""" + + @abstractmethod + def append_history(self, name: str, record: dict) -> None: + """Append a JSONL history entry for ``name``. + + Origin: ``deerflow.skills.manager.append_history``. + """ + + @abstractmethod + def read_history(self, name: str) -> list[dict]: + """Return all history records for ``name``, oldest first. + + Origin: ``deerflow.skills.manager.read_history``. + """ + + # ------------------------------------------------------------------ + # Concrete path helpers (layout is part of the SKILL.md protocol) + # ------------------------------------------------------------------ + + def get_container_root(self) -> str: + """Origin: ``deerflow.config.skills_config.SkillsConfig.container_path`` accessor.""" + return self._container_root + + def get_custom_skill_dir(self, name: str) -> Path: + """Path to ``custom/``. Does not create the directory. + + Origin: ``deerflow.skills.manager.get_custom_skill_dir``. + """ + normalized_name = self.validate_skill_name(name) + return self.get_skills_root_path() / SkillCategory.CUSTOM.value / normalized_name + + def get_custom_skill_file(self, name: str) -> Path: + """Path to ``custom//SKILL.md``. + + Origin: ``deerflow.skills.manager.get_custom_skill_file``. + """ + normalized_name = self.validate_skill_name(name) + return self.get_custom_skill_dir(normalized_name) / SKILL_MD_FILE + + def get_skill_history_file(self, name: str) -> Path: + """Path to ``custom/.history/.jsonl``. Does not create parents. 
+ + Origin: ``deerflow.skills.manager.get_skill_history_file``. + """ + normalized_name = self.validate_skill_name(name) + return self.get_skills_root_path() / SkillCategory.CUSTOM.value / ".history" / f"{normalized_name}.jsonl" + + # ------------------------------------------------------------------ + # Final template-method flows + # ------------------------------------------------------------------ + + def load_skills(self, *, enabled_only: bool = False) -> list[Skill]: + """Discover all skills, merge enabled state, sort and optionally filter. + + Origin: ``deerflow.skills.loader.load_skills``. + """ + from deerflow.skills.parser import parse_skill_file + + skills_by_name: dict[str, Skill] = {} + for category, category_root, md_path in self._iter_skill_files(): + skill = parse_skill_file( + md_path, + category=category, + relative_path=md_path.parent.relative_to(category_root), + ) + if skill: + skills_by_name[skill.name] = skill + + skills = list(skills_by_name.values()) + + # Merge enabled state from extensions config (re-read every call so + # changes made by another process are picked up immediately). + try: + from deerflow.config.extensions_config import ExtensionsConfig + + extensions_config = ExtensionsConfig.from_file() + for skill in skills: + skill.enabled = extensions_config.is_skill_enabled(skill.name, skill.category) + except Exception as e: + logger.warning("Failed to load extensions config: %s", e) + + if enabled_only: + skills = [s for s in skills if s.enabled] + + skills.sort(key=lambda s: s.name) + return skills + + def ensure_custom_skill_is_editable(self, name: str) -> None: + """Origin: ``deerflow.skills.manager.ensure_custom_skill_is_editable``.""" + if self.custom_skill_exists(name): + return + if self.public_skill_exists(name): + raise ValueError(f"'{name}' is a built-in skill. To customise it, create a new skill with the same name under skills/custom/.") + raise FileNotFoundError(f"Custom skill '{name}' not found.") diff --git a/backend/packages/harness/deerflow/skills/types.py b/backend/packages/harness/deerflow/skills/types.py index 0cdb668f3..fcf37ca2f 100644 --- a/backend/packages/harness/deerflow/skills/types.py +++ b/backend/packages/harness/deerflow/skills/types.py @@ -1,6 +1,20 @@ from dataclasses import dataclass +from enum import StrEnum from pathlib import Path +SKILL_MD_FILE = "SKILL.md" + + +class SkillCategory(StrEnum): + """Source category for a skill. + + - ``PUBLIC``: built-in skill bundled with the platform, read-only. + - ``CUSTOM``: user-authored skill that can be edited or deleted. 
+ """ + + PUBLIC = "public" + CUSTOM = "custom" + @dataclass class Skill: @@ -12,7 +26,7 @@ class Skill: skill_dir: Path skill_file: Path relative_path: Path # Relative path from category root to skill directory - category: str # 'public' or 'custom' + category: SkillCategory # 'public' or 'custom' enabled: bool = False # Whether this skill is enabled @property diff --git a/backend/packages/harness/deerflow/skills/validation.py b/backend/packages/harness/deerflow/skills/validation.py index 4c0f80857..f8af5d3d0 100644 --- a/backend/packages/harness/deerflow/skills/validation.py +++ b/backend/packages/harness/deerflow/skills/validation.py @@ -8,6 +8,8 @@ from pathlib import Path import yaml +from deerflow.skills.types import SKILL_MD_FILE + # Allowed properties in SKILL.md frontmatter ALLOWED_FRONTMATTER_PROPERTIES = {"name", "description", "license", "allowed-tools", "metadata", "compatibility", "version", "author"} @@ -21,9 +23,9 @@ def _validate_skill_frontmatter(skill_dir: Path) -> tuple[bool, str, str | None] Returns: Tuple of (is_valid, message, skill_name). """ - skill_md = skill_dir / "SKILL.md" + skill_md = skill_dir / SKILL_MD_FILE if not skill_md.exists(): - return False, "SKILL.md not found", None + return False, f"{SKILL_MD_FILE} not found", None content = skill_md.read_text(encoding="utf-8") if not content.startswith("---"): diff --git a/backend/packages/harness/deerflow/subagents/config.py b/backend/packages/harness/deerflow/subagents/config.py index 8554e7d4d..a2c961b9d 100644 --- a/backend/packages/harness/deerflow/subagents/config.py +++ b/backend/packages/harness/deerflow/subagents/config.py @@ -13,6 +13,8 @@ class SubagentConfig: system_prompt: The system prompt that guides the subagent's behavior. tools: Optional list of tool names to allow. If None, inherits all tools. disallowed_tools: Optional list of tool names to deny. + skills: Optional list of skill names to load. If None, inherits all enabled skills. + If an empty list, no skills are loaded. model: Model to use - 'inherit' uses parent's model. max_turns: Maximum number of agent turns before stopping. timeout_seconds: Maximum execution time in seconds (default: 900 = 15 minutes). 
@@ -23,6 +25,7 @@ class SubagentConfig: system_prompt: str tools: list[str] | None = None disallowed_tools: list[str] | None = field(default_factory=lambda: ["task"]) + skills: list[str] | None = None model: str = "inherit" max_turns: int = 50 timeout_seconds: int = 900 diff --git a/backend/packages/harness/deerflow/subagents/executor.py b/backend/packages/harness/deerflow/subagents/executor.py index 5529bec2c..539244af8 100644 --- a/backend/packages/harness/deerflow/subagents/executor.py +++ b/backend/packages/harness/deerflow/subagents/executor.py @@ -1,11 +1,14 @@ """Subagent execution engine.""" import asyncio +import atexit import logging import threading import uuid +from collections.abc import Callable, Coroutine from concurrent.futures import Future, ThreadPoolExecutor from concurrent.futures import TimeoutError as FuturesTimeoutError +from contextvars import Context, copy_context from dataclasses import dataclass, field from datetime import datetime from enum import Enum @@ -13,16 +16,23 @@ from typing import Any from langchain.agents import create_agent from langchain.tools import BaseTool -from langchain_core.messages import AIMessage, HumanMessage +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage from langchain_core.runnables import RunnableConfig from deerflow.agents.thread_state import SandboxState, ThreadDataState, ThreadState +from deerflow.config.app_config import AppConfig from deerflow.models import create_chat_model from deerflow.subagents.config import SubagentConfig logger = logging.getLogger(__name__) +_previous_shutdown_isolated_subagent_loop = globals().get("_shutdown_isolated_subagent_loop") +if callable(_previous_shutdown_isolated_subagent_loop): + atexit.unregister(_previous_shutdown_isolated_subagent_loop) + _previous_shutdown_isolated_subagent_loop() + + class SubagentStatus(Enum): """Status of a subagent execution.""" @@ -72,12 +82,105 @@ _background_tasks_lock = threading.Lock() # Thread pool for background task scheduling and orchestration _scheduler_pool = ThreadPoolExecutor(max_workers=3, thread_name_prefix="subagent-scheduler-") -# Thread pool for actual subagent execution (with timeout support) -# Larger pool to avoid blocking when scheduler submits execution tasks -_execution_pool = ThreadPoolExecutor(max_workers=3, thread_name_prefix="subagent-exec-") +# Persistent event loop for isolated subagent executions triggered from an +# already-running parent loop. Reusing one long-lived loop avoids creating a +# fresh loop per execution and then closing async resources bound to it. +_isolated_subagent_loop: asyncio.AbstractEventLoop | None = None +_isolated_subagent_loop_thread: threading.Thread | None = None +_isolated_subagent_loop_started: threading.Event | None = None +_isolated_subagent_loop_lock = threading.Lock() -# Dedicated pool for sync execute() calls made from an already-running event loop. 
-_isolated_loop_pool = ThreadPoolExecutor(max_workers=3, thread_name_prefix="subagent-isolated-") + +def _run_isolated_subagent_loop( + loop: asyncio.AbstractEventLoop, + started_event: threading.Event, +) -> None: + """Run the persistent isolated subagent loop in a dedicated daemon thread.""" + asyncio.set_event_loop(loop) + loop.call_soon(started_event.set) + try: + loop.run_forever() + finally: + started_event.clear() + + +def _shutdown_isolated_subagent_loop() -> None: + """Stop and close the persistent isolated subagent loop.""" + global _isolated_subagent_loop, _isolated_subagent_loop_thread, _isolated_subagent_loop_started + + with _isolated_subagent_loop_lock: + loop = _isolated_subagent_loop + thread = _isolated_subagent_loop_thread + _isolated_subagent_loop = None + _isolated_subagent_loop_thread = None + _isolated_subagent_loop_started = None + + if loop is None: + return + + if loop.is_running(): + loop.call_soon_threadsafe(loop.stop) + + if thread is not None and thread.is_alive() and thread is not threading.current_thread(): + thread.join(timeout=1) + + thread_stopped = thread is None or not thread.is_alive() + loop_stopped = not loop.is_running() + + if not loop.is_closed(): + if thread_stopped and loop_stopped: + loop.close() + else: + logger.warning( + "Skipping close of isolated subagent loop because shutdown did not complete within timeout (thread_alive=%s, loop_running=%s)", + thread is not None and thread.is_alive(), + loop.is_running(), + ) + + +atexit.register(_shutdown_isolated_subagent_loop) + + +def _get_isolated_subagent_loop() -> asyncio.AbstractEventLoop: + """Return the persistent event loop used by isolated subagent executions.""" + global _isolated_subagent_loop, _isolated_subagent_loop_thread, _isolated_subagent_loop_started + with _isolated_subagent_loop_lock: + thread_is_alive = _isolated_subagent_loop_thread is not None and _isolated_subagent_loop_thread.is_alive() + loop_is_usable = _isolated_subagent_loop is not None and not _isolated_subagent_loop.is_closed() and _isolated_subagent_loop.is_running() and thread_is_alive + + if not loop_is_usable: + loop = asyncio.new_event_loop() + started_event = threading.Event() + thread = threading.Thread( + target=_run_isolated_subagent_loop, + args=(loop, started_event), + name="subagent-persistent-loop", + daemon=True, + ) + thread.start() + if not started_event.wait(timeout=5): + loop.call_soon_threadsafe(loop.stop) + thread.join(timeout=1) + loop.close() + raise RuntimeError("Timed out starting isolated subagent event loop") + _isolated_subagent_loop = loop + _isolated_subagent_loop_thread = thread + _isolated_subagent_loop_started = started_event + + return _isolated_subagent_loop + + +def _submit_to_isolated_loop_in_context( + context: Context, + coro_factory: Callable[[], Coroutine[Any, Any, SubagentResult]], +) -> Future[SubagentResult]: + """Submit a coroutine to the isolated loop while preserving ContextVar state.""" + return context.run( + lambda: asyncio.run_coroutine_threadsafe( + coro_factory(), + _get_isolated_subagent_loop(), + ) + ) def _filter_tools( @@ -132,6 +235,7 @@ class SubagentExecutor: self, config: SubagentConfig, tools: list[BaseTool], + app_config: AppConfig | None = None, parent_model: str | None = None, sandbox_state: SandboxState | None = None, thread_data: ThreadDataState | None = None, @@ -143,6 +247,9 @@ class SubagentExecutor: Args: config: Subagent configuration. tools: List of all available tools (will be filtered). 
+ app_config: Resolved AppConfig; threaded into middleware factories + at agent-build time. When None, ``_create_agent`` falls back to + ``get_app_config()`` (matches the lead-agent factory's pattern). parent_model: The parent agent's model name for inheritance. sandbox_state: Sandbox state from parent agent. thread_data: Thread data from parent agent. @@ -150,6 +257,7 @@ class SubagentExecutor: trace_id: Trace ID from parent for distributed tracing. """ self.config = config + self.app_config = app_config self.parent_model = parent_model self.sandbox_state = sandbox_state self.thread_data = thread_data @@ -168,13 +276,17 @@ class SubagentExecutor: def _create_agent(self): """Create the agent instance.""" + # Mirror lead-agent factory pattern: prefer explicit app_config, + # fall back to ambient lookup at agent-build time. + from deerflow.config import get_app_config + + resolved_app_config = self.app_config or get_app_config() model_name = _get_model_name(self.config, self.parent_model) - model = create_chat_model(name=model_name, thinking_enabled=False) + model = create_chat_model(name=model_name, thinking_enabled=False, app_config=resolved_app_config) from deerflow.agents.middlewares.tool_error_handling_middleware import build_subagent_runtime_middlewares - # Reuse shared middleware composition with lead agent. - middlewares = build_subagent_runtime_middlewares(lazy_init=True) + middlewares = build_subagent_runtime_middlewares(app_config=resolved_app_config, lazy_init=True) return create_agent( model=model, @@ -184,7 +296,63 @@ class SubagentExecutor: state_schema=ThreadState, ) - def _build_initial_state(self, task: str) -> dict[str, Any]: + async def _load_skill_messages(self) -> list[SystemMessage]: + """Load skill content as conversation items based on config.skills. + + Aligned with Codex's pattern: each subagent loads its own skills + per-session and injects them as conversation items (developer messages), + not as system prompt text. The config.skills whitelist controls which + skills are loaded: + - None: load all enabled skills + - []: no skills + - ["skill-a", "skill-b"]: only these skills + + Returns: + List of SystemMessages containing skill content. 
+ """ + if self.config.skills is not None and len(self.config.skills) == 0: + logger.info(f"[trace={self.trace_id}] Subagent {self.config.name} skills=[] — skipping skill loading") + return [] + + try: + from deerflow.skills.storage import get_or_new_skill_storage + + # Use asyncio.to_thread to avoid blocking the event loop (LangGraph ASGI requirement) + all_skills = await asyncio.to_thread(get_or_new_skill_storage().load_skills, enabled_only=True) + logger.info(f"[trace={self.trace_id}] Subagent {self.config.name} loaded {len(all_skills)} enabled skills from disk") + except Exception: + logger.warning(f"[trace={self.trace_id}] Failed to load skills for subagent {self.config.name}", exc_info=True) + return [] + + if not all_skills: + logger.info(f"[trace={self.trace_id}] Subagent {self.config.name} no enabled skills found") + return [] + + # Filter by config.skills whitelist + if self.config.skills is not None: + allowed = set(self.config.skills) + skills = [s for s in all_skills if s.name in allowed] + else: + skills = all_skills + + if not skills: + return [] + + # Read each skill's SKILL.md content and create conversation items + messages = [] + for skill in skills: + try: + content = await asyncio.to_thread(skill.skill_file.read_text, encoding="utf-8") + content = content.strip() + if content: + messages.append(SystemMessage(content=f'\n{content}\n')) + logger.info(f"[trace={self.trace_id}] Subagent {self.config.name} loaded skill: {skill.name}") + except Exception: + logger.debug(f"[trace={self.trace_id}] Failed to read skill {skill.name}", exc_info=True) + + return messages + + async def _build_initial_state(self, task: str) -> dict[str, Any]: """Build the initial state for agent execution. Args: @@ -193,8 +361,17 @@ class SubagentExecutor: Returns: Initial state dictionary. """ + # Load skills as conversation items (Codex pattern) + skill_messages = await self._load_skill_messages() + + messages: list = [] + # Skill content injected as developer/system messages before the task + messages.extend(skill_messages) + # Then the actual task + messages.append(HumanMessage(content=task)) + state: dict[str, Any] = { - "messages": [HumanMessage(content=task)], + "messages": messages, } # Pass through sandbox and thread data from parent @@ -230,7 +407,7 @@ class SubagentExecutor: try: agent = self._create_agent() - state = self._build_initial_state(task) + state = await self._build_initial_state(task) # Build config with thread_id for sandbox access and recursion limit run_config: RunnableConfig = { @@ -378,42 +555,40 @@ class SubagentExecutor: return result def _execute_in_isolated_loop(self, task: str, result_holder: SubagentResult | None = None) -> SubagentResult: - """Execute the subagent in a completely fresh event loop. + """Execute the subagent on the persistent isolated event loop. - This method is designed to run in a separate thread to ensure complete - isolation from any parent event loop, preventing conflicts with asyncio - primitives that may be bound to the parent loop (e.g., httpx clients). + This method is used by the sync ``execute()`` path when the caller is + already running inside an event loop. Because ``execute()`` is a sync + API, this path blocks the caller while the actual coroutine runs on the + long-lived isolated loop. Reusing that loop keeps shared async clients + from being tied to a short-lived loop that gets closed per execution. 
""" + future: Future[SubagentResult] | None = None + parent_context = copy_context() try: - previous_loop = asyncio.get_event_loop() - except RuntimeError: - previous_loop = None - - # Create and set a new event loop for this thread - loop = asyncio.new_event_loop() - try: - asyncio.set_event_loop(loop) - return loop.run_until_complete(self._aexecute(task, result_holder)) - finally: - try: - pending = asyncio.all_tasks(loop) - if pending: - for task_obj in pending: - task_obj.cancel() - loop.run_until_complete(asyncio.gather(*pending, return_exceptions=True)) - - loop.run_until_complete(loop.shutdown_asyncgens()) - loop.run_until_complete(loop.shutdown_default_executor()) - except Exception: + future = _submit_to_isolated_loop_in_context( + parent_context, + lambda: self._aexecute(task, result_holder), + ) + return future.result(timeout=self.config.timeout_seconds) + except FuturesTimeoutError: + if result_holder is not None: + result_holder.cancel_event.set() + if future is not None: + future.cancel() + raise + except Exception: + if future is None: logger.debug( - f"[trace={self.trace_id}] Failed while cleaning up isolated event loop for subagent {self.config.name}", + f"[trace={self.trace_id}] Failed to submit subagent {self.config.name} to the isolated event loop", exc_info=True, ) - finally: - try: - loop.close() - finally: - asyncio.set_event_loop(previous_loop) + else: + logger.debug( + f"[trace={self.trace_id}] Subagent {self.config.name} failed while executing on the isolated event loop", + exc_info=True, + ) + raise def execute(self, task: str, result_holder: SubagentResult | None = None) -> SubagentResult: """Execute a task synchronously (wrapper around async execution). @@ -422,9 +597,9 @@ class SubagentExecutor: asynchronous tools (like MCP tools) to be used within the thread pool. When called from within an already-running event loop (e.g., when the - parent agent is async), this method isolates the subagent execution in - a separate thread to avoid event loop conflicts with shared async - primitives like httpx clients. + parent agent is async), this method synchronously waits on the + persistent isolated loop to avoid event loop conflicts with shared + async primitives like httpx clients. Args: task: The task description for the subagent. 
@@ -440,9 +615,8 @@ class SubagentExecutor: loop = None if loop is not None and loop.is_running(): - logger.debug(f"[trace={self.trace_id}] Subagent {self.config.name} detected running event loop, using isolated thread") - future = _isolated_loop_pool.submit(self._execute_in_isolated_loop, task, result_holder) - return future.result() + logger.debug(f"[trace={self.trace_id}] Subagent {self.config.name} detected running event loop, using isolated loop") + return self._execute_in_isolated_loop(task, result_holder) # Standard path: no running event loop, use asyncio.run return asyncio.run(self._aexecute(task, result_holder)) @@ -488,6 +662,8 @@ class SubagentExecutor: with _background_tasks_lock: _background_tasks[task_id] = result + parent_context = copy_context() + # Submit to scheduler pool def run_task(): with _background_tasks_lock: @@ -496,9 +672,12 @@ class SubagentExecutor: result_holder = _background_tasks[task_id] try: - # Submit execution to execution pool with timeout - # Pass result_holder so execute() can update it in real-time - execution_future: Future = _execution_pool.submit(self.execute, task, result_holder) + # Submit execution directly to the persistent isolated loop so the + # background path does not create a temporary loop via execute(). + execution_future = _submit_to_isolated_loop_in_context( + parent_context, + lambda: self._aexecute(task, result_holder), + ) try: # Wait for execution with timeout exec_result = execution_future.result(timeout=self.config.timeout_seconds) diff --git a/backend/packages/harness/deerflow/subagents/registry.py b/backend/packages/harness/deerflow/subagents/registry.py index e54f69f76..b34d7e9bd 100644 --- a/backend/packages/harness/deerflow/subagents/registry.py +++ b/backend/packages/harness/deerflow/subagents/registry.py @@ -10,53 +10,100 @@ from deerflow.subagents.config import SubagentConfig logger = logging.getLogger(__name__) +def _build_custom_subagent_config(name: str) -> SubagentConfig | None: + """Build a SubagentConfig from config.yaml custom_agents section. + + Args: + name: The name of the custom subagent. + + Returns: + SubagentConfig if found in custom_agents, None otherwise. + """ + from deerflow.config.subagents_config import get_subagents_app_config + + app_config = get_subagents_app_config() + custom = app_config.custom_agents.get(name) + if custom is None: + return None + + return SubagentConfig( + name=name, + description=custom.description, + system_prompt=custom.system_prompt, + tools=custom.tools, + disallowed_tools=custom.disallowed_tools, + skills=custom.skills, + model=custom.model, + max_turns=custom.max_turns, + timeout_seconds=custom.timeout_seconds, + ) + + def get_subagent_config(name: str) -> SubagentConfig | None: """Get a subagent configuration by name, with config.yaml overrides applied. + Resolution order (mirrors Codex's config layering): + 1. Built-in subagents (general-purpose, bash) + 2. Custom subagents from config.yaml custom_agents section + 3. Per-agent overrides from config.yaml agents section (timeout, max_turns, model, skills) + Args: name: The name of the subagent. Returns: SubagentConfig if found (with any config.yaml overrides applied), None otherwise. 
""" + # Step 1: Look up built-in, then fall back to custom_agents config = BUILTIN_SUBAGENTS.get(name) + if config is None: + config = _build_custom_subagent_config(name) if config is None: return None - # Apply runtime overrides (timeout, max_turns, model) from config.yaml + # Step 2: Apply per-agent overrides from config.yaml agents section. + # Only explicit per-agent overrides are applied here. Global defaults + # (timeout_seconds, max_turns at the top level) apply to built-in agents + # but must NOT override custom agents' own values — custom agents define + # their own defaults in the custom_agents section. # Lazy import to avoid circular deps. from deerflow.config.subagents_config import get_subagents_app_config app_config = get_subagents_app_config() - effective_timeout = app_config.get_timeout_for(name) - effective_max_turns = app_config.get_max_turns_for(name, config.max_turns) + is_builtin = name in BUILTIN_SUBAGENTS + agent_override = app_config.agents.get(name) overrides = {} - if effective_timeout != config.timeout_seconds: - logger.debug( - "Subagent '%s': timeout overridden by config.yaml (%ss -> %ss)", - name, - config.timeout_seconds, - effective_timeout, - ) - overrides["timeout_seconds"] = effective_timeout - if effective_max_turns != config.max_turns: - logger.debug( - "Subagent '%s': max_turns overridden by config.yaml (%s -> %s)", - name, - config.max_turns, - effective_max_turns, - ) - overrides["max_turns"] = effective_max_turns + + # Timeout: per-agent override > global default (builtins only) > config's own value + if agent_override is not None and agent_override.timeout_seconds is not None: + if agent_override.timeout_seconds != config.timeout_seconds: + logger.debug("Subagent '%s': timeout overridden (%ss -> %ss)", name, config.timeout_seconds, agent_override.timeout_seconds) + overrides["timeout_seconds"] = agent_override.timeout_seconds + elif is_builtin and app_config.timeout_seconds != config.timeout_seconds: + logger.debug("Subagent '%s': timeout from global default (%ss -> %ss)", name, config.timeout_seconds, app_config.timeout_seconds) + overrides["timeout_seconds"] = app_config.timeout_seconds + + # Max turns: per-agent override > global default (builtins only) > config's own value + if agent_override is not None and agent_override.max_turns is not None: + if agent_override.max_turns != config.max_turns: + logger.debug("Subagent '%s': max_turns overridden (%s -> %s)", name, config.max_turns, agent_override.max_turns) + overrides["max_turns"] = agent_override.max_turns + elif is_builtin and app_config.max_turns is not None and app_config.max_turns != config.max_turns: + logger.debug("Subagent '%s': max_turns from global default (%s -> %s)", name, config.max_turns, app_config.max_turns) + overrides["max_turns"] = app_config.max_turns + + # Model: per-agent override only (no global default for model) effective_model = app_config.get_model_for(name) if effective_model is not None and effective_model != config.model: - logger.debug( - "Subagent '%s': model overridden by config.yaml (%s -> %s)", - name, - config.model, - effective_model, - ) + logger.debug("Subagent '%s': model overridden (%s -> %s)", name, config.model, effective_model) overrides["model"] = effective_model + + # Skills: per-agent override only (no global default for skills) + effective_skills = app_config.get_skills_for(name) + if effective_skills is not None and effective_skills != config.skills: + logger.debug("Subagent '%s': skills overridden (%s -> %s)", name, config.skills, 
effective_skills) + overrides["skills"] = effective_skills + if overrides: config = replace(config, **overrides) @@ -67,18 +114,33 @@ def list_subagents() -> list[SubagentConfig]: """List all available subagent configurations (with config.yaml overrides applied). Returns: - List of all registered SubagentConfig instances. + List of all registered SubagentConfig instances (built-in + custom). """ - return [get_subagent_config(name) for name in BUILTIN_SUBAGENTS] + configs = [] + for name in get_subagent_names(): + config = get_subagent_config(name) + if config is not None: + configs.append(config) + return configs def get_subagent_names() -> list[str]: - """Get all available subagent names. + """Get all available subagent names (built-in + custom). Returns: List of subagent names. """ - return list(BUILTIN_SUBAGENTS.keys()) + names = list(BUILTIN_SUBAGENTS.keys()) + + # Merge custom_agents from config.yaml + from deerflow.config.subagents_config import get_subagents_app_config + + app_config = get_subagents_app_config() + for custom_name in app_config.custom_agents: + if custom_name not in names: + names.append(custom_name) + + return names def get_available_subagent_names() -> list[str]: @@ -87,11 +149,11 @@ def get_available_subagent_names() -> list[str]: Returns: List of subagent names visible to the current sandbox configuration. """ - names = list(BUILTIN_SUBAGENTS.keys()) + names = get_subagent_names() try: host_bash_allowed = is_host_bash_allowed() except Exception: - logger.debug("Could not determine host bash availability; exposing all built-in subagents") + logger.debug("Could not determine host bash availability; exposing all subagents") return names if not host_bash_allowed: diff --git a/backend/packages/harness/deerflow/tools/builtins/invoke_acp_agent_tool.py b/backend/packages/harness/deerflow/tools/builtins/invoke_acp_agent_tool.py index baf7f8ff5..618649020 100644 --- a/backend/packages/harness/deerflow/tools/builtins/invoke_acp_agent_tool.py +++ b/backend/packages/harness/deerflow/tools/builtins/invoke_acp_agent_tool.py @@ -33,11 +33,12 @@ def _get_work_dir(thread_id: str | None) -> str: An absolute physical filesystem path to use as the working directory. 
""" from deerflow.config.paths import get_paths + from deerflow.runtime.user_context import get_effective_user_id paths = get_paths() if thread_id: try: - work_dir = paths.acp_workspace_dir(thread_id) + work_dir = paths.acp_workspace_dir(thread_id, user_id=get_effective_user_id()) except ValueError: logger.warning("Invalid thread_id %r for ACP workspace, falling back to global", thread_id) work_dir = paths.base_dir / "acp-workspace" diff --git a/backend/packages/harness/deerflow/tools/builtins/present_file_tool.py b/backend/packages/harness/deerflow/tools/builtins/present_file_tool.py index 13ddd247e..13a7a017e 100644 --- a/backend/packages/harness/deerflow/tools/builtins/present_file_tool.py +++ b/backend/packages/harness/deerflow/tools/builtins/present_file_tool.py @@ -9,6 +9,7 @@ from langgraph.typing import ContextT from deerflow.agents.thread_state import ThreadState from deerflow.config.paths import VIRTUAL_PATH_PREFIX, get_paths +from deerflow.runtime.user_context import get_effective_user_id OUTPUTS_VIRTUAL_PREFIX = f"{VIRTUAL_PATH_PREFIX}/outputs" @@ -65,7 +66,10 @@ def _normalize_presented_filepath( virtual_prefix = VIRTUAL_PATH_PREFIX.lstrip("/") if stripped == virtual_prefix or stripped.startswith(virtual_prefix + "/"): - actual_path = get_paths().resolve_virtual_path(thread_id, filepath) + try: + actual_path = get_paths().resolve_virtual_path(thread_id, filepath, user_id=get_effective_user_id()) + except TypeError: + actual_path = get_paths().resolve_virtual_path(thread_id, filepath) else: actual_path = Path(filepath).expanduser().resolve() diff --git a/backend/packages/harness/deerflow/tools/builtins/setup_agent_tool.py b/backend/packages/harness/deerflow/tools/builtins/setup_agent_tool.py index a42f8bbef..793ccb13a 100644 --- a/backend/packages/harness/deerflow/tools/builtins/setup_agent_tool.py +++ b/backend/packages/harness/deerflow/tools/builtins/setup_agent_tool.py @@ -17,12 +17,14 @@ def setup_agent( soul: str, description: str, runtime: ToolRuntime, + skills: list[str] | None = None, ) -> Command: """Setup the custom DeerFlow agent. Args: soul: Full SOUL.md content defining the agent's personality and behavior. description: One-line description of what the agent does. + skills: Optional list of skill names this agent should use. None means use all enabled skills, empty list means no skills. 
""" agent_name: str | None = runtime.context.get("agent_name") if runtime.context else None @@ -41,6 +43,8 @@ def setup_agent( config_data: dict = {"name": agent_name} if description: config_data["description"] = description + if skills is not None: + config_data["skills"] = skills config_file = agent_dir / "config.yaml" with open(config_file, "w", encoding="utf-8") as f: diff --git a/backend/packages/harness/deerflow/tools/builtins/task_tool.py b/backend/packages/harness/deerflow/tools/builtins/task_tool.py index 437fb37ac..59613272c 100644 --- a/backend/packages/harness/deerflow/tools/builtins/task_tool.py +++ b/backend/packages/harness/deerflow/tools/builtins/task_tool.py @@ -10,7 +10,6 @@ from langchain.tools import InjectedToolCallId, ToolRuntime, tool from langgraph.config import get_stream_writer from langgraph.typing import ContextT -from deerflow.agents.lead_agent.prompt import get_skills_prompt_section from deerflow.agents.thread_state import ThreadState from deerflow.sandbox.security import LOCAL_BASH_SUBAGENT_DISABLED_MESSAGE, is_host_bash_allowed from deerflow.subagents import SubagentExecutor, get_available_subagent_names, get_subagent_config @@ -19,6 +18,17 @@ from deerflow.subagents.executor import SubagentStatus, cleanup_background_task, logger = logging.getLogger(__name__) +def _merge_skill_allowlists(parent: list[str] | None, child: list[str] | None) -> list[str] | None: + """Return the effective subagent skill allowlist under the parent policy.""" + if parent is None: + return child + if child is None: + return list(parent) + + parent_set = set(parent) + return [skill for skill in child if skill in parent_set] + + @tool("task", parse_docstring=True) async def task_tool( runtime: ToolRuntime[ContextT, ThreadState], @@ -35,7 +45,7 @@ async def task_tool( - Handle complex multi-step tasks autonomously - Execute commands or operations in isolated contexts - Available subagent types depend on the active sandbox configuration: + Built-in subagent types: - **general-purpose**: A capable agent for complex, multi-step tasks that require both exploration and action. Use when the task requires complex reasoning, multiple dependent steps, or would benefit from isolated context. @@ -43,6 +53,11 @@ async def task_tool( available when host bash is explicitly allowed or when using an isolated shell sandbox such as `AioSandboxProvider`. + Additional custom subagent types may be defined in config.yaml under + `subagents.custom_agents`. Each custom type can have its own system prompt, + tools, skills, model, and timeout configuration. If an unknown subagent_type + is provided, the error message will list all available types. + When to use this tool: - Complex tasks requiring multiple steps or tools - Tasks that produce verbose output @@ -72,16 +87,13 @@ async def task_tool( # Build config overrides overrides: dict = {} - skills_section = get_skills_prompt_section() - if skills_section: - overrides["system_prompt"] = config.system_prompt + "\n\n" + skills_section + # Skills are loaded by SubagentExecutor per-session (aligned with Codex's pattern: + # each subagent loads its own skills based on config, injected as conversation items). + # No longer appended to system_prompt here. 
if max_turns is not None: overrides["max_turns"] = max_turns - if overrides: - config = replace(config, **overrides) - # Extract parent context from runtime sandbox_state = None thread_data = None @@ -104,6 +116,13 @@ async def task_tool( # Get or generate trace_id for distributed tracing trace_id = metadata.get("trace_id") or str(uuid.uuid4())[:8] + parent_available_skills = metadata.get("available_skills") + if parent_available_skills is not None: + overrides["skills"] = _merge_skill_allowlists(list(parent_available_skills), config.skills) + + if overrides: + config = replace(config, **overrides) + # Get available tools (excluding task tool to prevent nesting) # Lazy import to avoid circular dependency from deerflow.tools import get_available_tools diff --git a/backend/packages/harness/deerflow/tools/builtins/tool_search.py b/backend/packages/harness/deerflow/tools/builtins/tool_search.py index ffbe2060f..88f4e3112 100644 --- a/backend/packages/harness/deerflow/tools/builtins/tool_search.py +++ b/backend/packages/harness/deerflow/tools/builtins/tool_search.py @@ -112,6 +112,15 @@ class DeferredToolRegistry: def entries(self) -> list[DeferredToolEntry]: return list(self._entries) + @property + def deferred_names(self) -> set[str]: + """Names of tools that are still hidden from model binding.""" + return {entry.name for entry in self._entries} + + def contains(self, name: str) -> bool: + """Return whether *name* is still deferred.""" + return any(entry.name == name for entry in self._entries) + def __len__(self) -> int: return len(self._entries) diff --git a/backend/packages/harness/deerflow/tools/builtins/view_image_tool.py b/backend/packages/harness/deerflow/tools/builtins/view_image_tool.py index e47ab1938..3dedcab70 100644 --- a/backend/packages/harness/deerflow/tools/builtins/view_image_tool.py +++ b/backend/packages/harness/deerflow/tools/builtins/view_image_tool.py @@ -8,7 +8,42 @@ from langchain_core.messages import ToolMessage from langgraph.types import Command from langgraph.typing import ContextT -from deerflow.agents.thread_state import ThreadState +from deerflow.agents.thread_state import ThreadDataState, ThreadState +from deerflow.config.paths import VIRTUAL_PATH_PREFIX + +_ALLOWED_IMAGE_VIRTUAL_ROOTS = ( + f"{VIRTUAL_PATH_PREFIX}/workspace", + f"{VIRTUAL_PATH_PREFIX}/uploads", + f"{VIRTUAL_PATH_PREFIX}/outputs", +) +_ALLOWED_IMAGE_VIRTUAL_ROOTS_TEXT = ", ".join(_ALLOWED_IMAGE_VIRTUAL_ROOTS) +_MAX_IMAGE_BYTES = 20 * 1024 * 1024 +_EXTENSION_TO_MIME = { + ".jpg": "image/jpeg", + ".jpeg": "image/jpeg", + ".png": "image/png", + ".webp": "image/webp", +} + + +def _is_allowed_image_virtual_path(image_path: str) -> bool: + return any(image_path == root or image_path.startswith(f"{root}/") for root in _ALLOWED_IMAGE_VIRTUAL_ROOTS) + + +def _detect_image_mime(image_data: bytes) -> str | None: + if image_data.startswith(b"\xff\xd8\xff"): + return "image/jpeg" + if image_data.startswith(b"\x89PNG\r\n\x1a\n"): + return "image/png" + if len(image_data) >= 12 and image_data.startswith(b"RIFF") and image_data[8:12] == b"WEBP": + return "image/webp" + return None + + +def _sanitize_image_error(error: Exception, thread_data: ThreadDataState | None) -> str: + from deerflow.sandbox.tools import mask_local_paths_in_output + + return mask_local_paths_in_output(f"{type(error).__name__}: {error}", thread_data) @tool("view_image", parse_docstring=True) @@ -29,22 +64,39 @@ def view_image_tool( - For multiple files at once (use present_files instead) Args: - image_path: Absolute path to the image 
file. Common formats supported: jpg, jpeg, png, webp. + image_path: Absolute /mnt/user-data virtual path to the image file. Common formats supported: jpg, jpeg, png, webp. """ - from deerflow.sandbox.tools import get_thread_data, replace_virtual_path + from deerflow.sandbox.exceptions import SandboxRuntimeError + from deerflow.sandbox.tools import ( + get_thread_data, + resolve_and_validate_user_data_path, + validate_local_tool_path, + ) - # Replace virtual path with actual path - # /mnt/user-data/* paths are mapped to thread-specific directories thread_data = get_thread_data(runtime) - actual_path = replace_virtual_path(image_path, thread_data) - # Validate that the path is absolute - path = Path(actual_path) - if not path.is_absolute(): + if not _is_allowed_image_virtual_path(image_path): return Command( - update={"messages": [ToolMessage(f"Error: Path must be absolute, got: {image_path}", tool_call_id=tool_call_id)]}, + update={ + "messages": [ + ToolMessage( + f"Error: Only image paths under {_ALLOWED_IMAGE_VIRTUAL_ROOTS_TEXT} are allowed", + tool_call_id=tool_call_id, + ) + ] + }, ) + try: + validate_local_tool_path(image_path, thread_data, read_only=True) + actual_path = resolve_and_validate_user_data_path(image_path, thread_data) + except (PermissionError, SandboxRuntimeError) as e: + return Command( + update={"messages": [ToolMessage(f"Error: {str(e)}", tool_call_id=tool_call_id)]}, + ) + + path = Path(actual_path) + # Validate that the file exists if not path.exists(): return Command( @@ -58,34 +110,49 @@ def view_image_tool( ) # Validate image extension - valid_extensions = {".jpg", ".jpeg", ".png", ".webp"} - if path.suffix.lower() not in valid_extensions: + expected_mime_type = _EXTENSION_TO_MIME.get(path.suffix.lower()) + if expected_mime_type is None: return Command( - update={"messages": [ToolMessage(f"Error: Unsupported image format: {path.suffix}. Supported formats: {', '.join(valid_extensions)}", tool_call_id=tool_call_id)]}, + update={"messages": [ToolMessage(f"Error: Unsupported image format: {path.suffix}. Supported formats: {', '.join(_EXTENSION_TO_MIME)}", tool_call_id=tool_call_id)]}, ) # Detect MIME type from file extension mime_type, _ = mimetypes.guess_type(actual_path) if mime_type is None: - # Fallback to default MIME types for common image formats - extension_to_mime = { - ".jpg": "image/jpeg", - ".jpeg": "image/jpeg", - ".png": "image/png", - ".webp": "image/webp", - } - mime_type = extension_to_mime.get(path.suffix.lower(), "application/octet-stream") + mime_type = expected_mime_type + + try: + image_size = path.stat().st_size + except OSError as e: + return Command( + update={"messages": [ToolMessage(f"Error reading image metadata: {_sanitize_image_error(e, thread_data)}", tool_call_id=tool_call_id)]}, + ) + if image_size > _MAX_IMAGE_BYTES: + return Command( + update={"messages": [ToolMessage(f"Error: Image file is too large: {image_size} bytes. 
Maximum supported size is {_MAX_IMAGE_BYTES} bytes", tool_call_id=tool_call_id)]}, + ) # Read image file and convert to base64 try: with open(actual_path, "rb") as f: image_data = f.read() - image_base64 = base64.b64encode(image_data).decode("utf-8") except Exception as e: return Command( - update={"messages": [ToolMessage(f"Error reading image file: {str(e)}", tool_call_id=tool_call_id)]}, + update={"messages": [ToolMessage(f"Error reading image file: {_sanitize_image_error(e, thread_data)}", tool_call_id=tool_call_id)]}, ) + detected_mime_type = _detect_image_mime(image_data) + if detected_mime_type is None: + return Command( + update={"messages": [ToolMessage("Error: File contents do not match a supported image format", tool_call_id=tool_call_id)]}, + ) + if detected_mime_type != expected_mime_type: + return Command( + update={"messages": [ToolMessage(f"Error: Image contents are {detected_mime_type}, but file extension indicates {expected_mime_type}", tool_call_id=tool_call_id)]}, + ) + mime_type = detected_mime_type + image_base64 = base64.b64encode(image_data).decode("utf-8") + # Update viewed_images in state # The merge_viewed_images reducer will handle merging with existing images new_viewed_images = {image_path: {"base64": image_base64, "mime_type": mime_type}} diff --git a/backend/packages/harness/deerflow/tools/skill_manage_tool.py b/backend/packages/harness/deerflow/tools/skill_manage_tool.py index 3b7a109cc..c0114eb08 100644 --- a/backend/packages/harness/deerflow/tools/skill_manage_tool.py +++ b/backend/packages/harness/deerflow/tools/skill_manage_tool.py @@ -4,7 +4,6 @@ from __future__ import annotations import asyncio import logging -import shutil from typing import Any from weakref import WeakValueDictionary @@ -14,20 +13,10 @@ from langgraph.typing import ContextT from deerflow.agents.lead_agent.prompt import refresh_skills_system_prompt_cache_async from deerflow.agents.thread_state import ThreadState from deerflow.mcp.tools import _make_sync_tool_wrapper -from deerflow.skills.manager import ( - append_history, - atomic_write, - custom_skill_exists, - ensure_custom_skill_is_editable, - ensure_safe_support_path, - get_custom_skill_dir, - get_custom_skill_file, - public_skill_exists, - read_custom_skill_content, - validate_skill_markdown_content, - validate_skill_name, -) from deerflow.skills.security_scanner import scan_skill_content +from deerflow.skills.storage import get_or_new_skill_storage +from deerflow.skills.storage.skill_storage import SkillStorage +from deerflow.skills.types import SKILL_MD_FILE logger = logging.getLogger(__name__) @@ -96,50 +85,50 @@ async def _skill_manage_impl( replace: Replacement text for patch. expected_count: Optional expected number of replacements for patch. 
""" - name = validate_skill_name(name) + name = SkillStorage.validate_skill_name(name) lock = _get_lock(name) thread_id = _get_thread_id(runtime) + skill_storage = get_or_new_skill_storage() async with lock: if action == "create": - if await _to_thread(custom_skill_exists, name): + if await _to_thread(skill_storage.custom_skill_exists, name): raise ValueError(f"Custom skill '{name}' already exists.") if content is None: raise ValueError("content is required for create.") - await _to_thread(validate_skill_markdown_content, name, content) - scan = await _scan_or_raise(content, executable=False, location=f"{name}/SKILL.md") - skill_file = await _to_thread(get_custom_skill_file, name) - await _to_thread(atomic_write, skill_file, content) + await _to_thread(skill_storage.validate_skill_markdown_content, name, content) + scan = await _scan_or_raise(content, executable=False, location=f"{name}/{SKILL_MD_FILE}") + await _to_thread(skill_storage.write_custom_skill, name, SKILL_MD_FILE, content) await _to_thread( - append_history, + skill_storage.append_history, name, - _history_record(action="create", file_path="SKILL.md", prev_content=None, new_content=content, thread_id=thread_id, scanner=scan), + _history_record(action="create", file_path=SKILL_MD_FILE, prev_content=None, new_content=content, thread_id=thread_id, scanner=scan), ) await refresh_skills_system_prompt_cache_async() return f"Created custom skill '{name}'." if action == "edit": - await _to_thread(ensure_custom_skill_is_editable, name) + await _to_thread(skill_storage.ensure_custom_skill_is_editable, name) if content is None: raise ValueError("content is required for edit.") - await _to_thread(validate_skill_markdown_content, name, content) - scan = await _scan_or_raise(content, executable=False, location=f"{name}/SKILL.md") - skill_file = await _to_thread(get_custom_skill_file, name) + await _to_thread(skill_storage.validate_skill_markdown_content, name, content) + scan = await _scan_or_raise(content, executable=False, location=f"{name}/{SKILL_MD_FILE}") + skill_file = skill_storage.get_custom_skill_file(name) prev_content = await _to_thread(skill_file.read_text, encoding="utf-8") - await _to_thread(atomic_write, skill_file, content) + await _to_thread(skill_storage.write_custom_skill, name, SKILL_MD_FILE, content) await _to_thread( - append_history, + skill_storage.append_history, name, - _history_record(action="edit", file_path="SKILL.md", prev_content=prev_content, new_content=content, thread_id=thread_id, scanner=scan), + _history_record(action="edit", file_path=SKILL_MD_FILE, prev_content=prev_content, new_content=content, thread_id=thread_id, scanner=scan), ) await refresh_skills_system_prompt_cache_async() return f"Updated custom skill '{name}'." 
if action == "patch": - await _to_thread(ensure_custom_skill_is_editable, name) + await _to_thread(skill_storage.ensure_custom_skill_is_editable, name) if find is None or replace is None: raise ValueError("find and replace are required for patch.") - skill_file = await _to_thread(get_custom_skill_file, name) + skill_file = skill_storage.get_custom_skill_file(name) prev_content = await _to_thread(skill_file.read_text, encoding="utf-8") occurrences = prev_content.count(find) if occurrences == 0: @@ -148,64 +137,67 @@ async def _skill_manage_impl( raise ValueError(f"Expected {expected_count} replacements but found {occurrences}.") replacement_count = expected_count if expected_count is not None else 1 new_content = prev_content.replace(find, replace, replacement_count) - await _to_thread(validate_skill_markdown_content, name, new_content) - scan = await _scan_or_raise(new_content, executable=False, location=f"{name}/SKILL.md") - await _to_thread(atomic_write, skill_file, new_content) + await _to_thread(skill_storage.validate_skill_markdown_content, name, new_content) + scan = await _scan_or_raise(new_content, executable=False, location=f"{name}/{SKILL_MD_FILE}") + await _to_thread(skill_storage.write_custom_skill, name, SKILL_MD_FILE, new_content) await _to_thread( - append_history, + skill_storage.append_history, name, - _history_record(action="patch", file_path="SKILL.md", prev_content=prev_content, new_content=new_content, thread_id=thread_id, scanner=scan), + _history_record(action="patch", file_path=SKILL_MD_FILE, prev_content=prev_content, new_content=new_content, thread_id=thread_id, scanner=scan), ) await refresh_skills_system_prompt_cache_async() return f"Patched custom skill '{name}' ({replacement_count} replacement(s) applied, {occurrences} match(es) found)." if action == "delete": - await _to_thread(ensure_custom_skill_is_editable, name) - skill_dir = await _to_thread(get_custom_skill_dir, name) - prev_content = await _to_thread(read_custom_skill_content, name) await _to_thread( - append_history, + skill_storage.delete_custom_skill, name, - _history_record(action="delete", file_path="SKILL.md", prev_content=prev_content, new_content=None, thread_id=thread_id, scanner={"decision": "allow", "reason": "Deletion requested."}), + history_meta=_history_record( + action="delete", + file_path=SKILL_MD_FILE, + prev_content=None, + new_content=None, + thread_id=thread_id, + scanner={"decision": "allow", "reason": "Deletion requested."}, + ), ) - await _to_thread(shutil.rmtree, skill_dir) await refresh_skills_system_prompt_cache_async() return f"Deleted custom skill '{name}'." 
if action == "write_file": - await _to_thread(ensure_custom_skill_is_editable, name) + await _to_thread(skill_storage.ensure_custom_skill_is_editable, name) if path is None or content is None: raise ValueError("path and content are required for write_file.") - target = await _to_thread(ensure_safe_support_path, name, path) + target = await _to_thread(skill_storage.ensure_safe_support_path, name, path) exists = await _to_thread(target.exists) prev_content = await _to_thread(target.read_text, encoding="utf-8") if exists else None executable = "scripts/" in path or path.startswith("scripts/") scan = await _scan_or_raise(content, executable=executable, location=f"{name}/{path}") - await _to_thread(atomic_write, target, content) + await _to_thread(skill_storage.write_custom_skill, name, path, content) await _to_thread( - append_history, + skill_storage.append_history, name, _history_record(action="write_file", file_path=path, prev_content=prev_content, new_content=content, thread_id=thread_id, scanner=scan), ) return f"Wrote '{path}' for custom skill '{name}'." if action == "remove_file": - await _to_thread(ensure_custom_skill_is_editable, name) + await _to_thread(skill_storage.ensure_custom_skill_is_editable, name) if path is None: raise ValueError("path is required for remove_file.") - target = await _to_thread(ensure_safe_support_path, name, path) + target = await _to_thread(skill_storage.ensure_safe_support_path, name, path) if not await _to_thread(target.exists): raise FileNotFoundError(f"Supporting file '{path}' not found for skill '{name}'.") prev_content = await _to_thread(target.read_text, encoding="utf-8") await _to_thread(target.unlink) await _to_thread( - append_history, + skill_storage.append_history, name, _history_record(action="remove_file", file_path=path, prev_content=prev_content, new_content=None, thread_id=thread_id, scanner={"decision": "allow", "reason": "Deletion requested."}), ) return f"Removed '{path}' from custom skill '{name}'." - if await _to_thread(public_skill_exists, name): + if await _to_thread(skill_storage.public_skill_exists, name): raise ValueError(f"'{name}' is a built-in skill. To customise it, create a new skill with the same name under skills/custom/.") raise ValueError(f"Unsupported action '{action}'.") diff --git a/backend/packages/harness/deerflow/tools/tools.py b/backend/packages/harness/deerflow/tools/tools.py index 6b027e54e..2ba6eb6b4 100644 --- a/backend/packages/harness/deerflow/tools/tools.py +++ b/backend/packages/harness/deerflow/tools/tools.py @@ -3,6 +3,7 @@ import logging from langchain.tools import BaseTool from deerflow.config import get_app_config +from deerflow.config.app_config import AppConfig from deerflow.reflection import resolve_variable from deerflow.sandbox.security import is_host_bash_allowed from deerflow.tools.builtins import ask_clarification_tool, present_file_tool, task_tool, view_image_tool @@ -37,6 +38,8 @@ def get_available_tools( include_mcp: bool = True, model_name: str | None = None, subagent_enabled: bool = False, + *, + app_config: AppConfig | None = None, ) -> list[BaseTool]: """Get all available tools from config. @@ -52,7 +55,7 @@ def get_available_tools( Returns: List of available tools. """ - config = get_app_config() + config = app_config or get_app_config() tool_configs = [tool for tool in config.tools if groups is None or tool.group in groups] # Do not expose host bash by default when LocalSandboxProvider is active. 
diff --git a/backend/packages/harness/deerflow/uploads/manager.py b/backend/packages/harness/deerflow/uploads/manager.py index 8c60399e7..c36151b38 100644 --- a/backend/packages/harness/deerflow/uploads/manager.py +++ b/backend/packages/harness/deerflow/uploads/manager.py @@ -10,6 +10,7 @@ from pathlib import Path from urllib.parse import quote from deerflow.config.paths import VIRTUAL_PATH_PREFIX, get_paths +from deerflow.runtime.user_context import get_effective_user_id class PathTraversalError(ValueError): @@ -33,7 +34,7 @@ def validate_thread_id(thread_id: str) -> None: def get_uploads_dir(thread_id: str) -> Path: """Return the uploads directory path for a thread (no side effects).""" validate_thread_id(thread_id) - return get_paths().sandbox_uploads_dir(thread_id) + return get_paths().sandbox_uploads_dir(thread_id, user_id=get_effective_user_id()) def ensure_uploads_dir(thread_id: str) -> Path: diff --git a/backend/packages/harness/pyproject.toml b/backend/packages/harness/pyproject.toml index e7a81ff7b..47cd1afad 100644 --- a/backend/packages/harness/pyproject.toml +++ b/backend/packages/harness/pyproject.toml @@ -10,16 +10,16 @@ dependencies = [ "exa-py>=1.0.0", "httpx>=0.28.0", "kubernetes>=30.0.0", - "langchain>=1.2.3", - "langchain-anthropic>=1.3.4", + "langchain>=1.2.15", + "langchain-anthropic>=1.4.1", "langchain-deepseek>=1.0.1", - "langchain-mcp-adapters>=0.1.0", - "langchain-openai>=1.1.7", + "langchain-mcp-adapters>=0.2.2", + "langchain-openai>=1.2.1", "langfuse>=3.4.1", - "langgraph>=1.0.6,<1.0.10", - "langgraph-api>=0.7.0,<0.8.0", - "langgraph-cli>=0.4.14", - "langgraph-runtime-inmem>=0.22.1", + "langgraph>=1.1.9", + "langgraph-api>=0.8.1", + "langgraph-cli>=0.4.24", + "langgraph-runtime-inmem>=0.28.0", "markdownify>=1.2.2", "markitdown[all,xlsx]>=0.0.1a2", "pydantic>=2.12.5", @@ -33,10 +33,19 @@ dependencies = [ "langchain-google-genai>=4.2.1", "langgraph-checkpoint-sqlite>=3.0.3", "langgraph-sdk>=0.1.51", + "sqlalchemy[asyncio]>=2.0,<3.0", + "aiosqlite>=0.19", + "alembic>=1.13", ] [project.optional-dependencies] ollama = ["langchain-ollama>=0.3.0"] +postgres = [ + "asyncpg>=0.29", + "langgraph-checkpoint-postgres>=3.0.5", + "psycopg[binary]>=3.3.3", + "psycopg-pool>=3.3.0", +] pymupdf = ["pymupdf4llm>=0.0.17"] [build-system] diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 2b2e43baa..64c6e74c3 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -17,13 +17,34 @@ dependencies = [ "langgraph-sdk>=0.1.51", "markdown-to-mrkdwn>=0.3.1", "wecom-aibot-python-sdk>=0.1.6", + "dingtalk-stream>=0.24.3", + "bcrypt>=4.0.0", + "pyjwt>=2.9.0", + "email-validator>=2.0.0", ] +[project.optional-dependencies] +postgres = ["deerflow-harness[postgres]"] + [dependency-groups] -dev = ["pytest>=9.0.3", "ruff>=0.14.11"] +dev = [ + "prompt-toolkit>=3.0.0", + "pytest>=9.0.3", + "pytest-asyncio>=1.3.0", + "ruff>=0.14.11", +] + +[tool.pytest.ini_options] +markers = [ + "no_auto_user: disable the conftest autouse contextvar fixture for this test", +] + +[tool.uv] +index-url = "https://pypi.org/simple" [tool.uv.workspace] members = ["packages/harness"] [tool.uv.sources] deerflow-harness = { workspace = true } + diff --git a/backend/scripts/migrate_user_isolation.py b/backend/scripts/migrate_user_isolation.py new file mode 100644 index 000000000..82923e4b7 --- /dev/null +++ b/backend/scripts/migrate_user_isolation.py @@ -0,0 +1,159 @@ +"""One-time migration: move legacy thread dirs and memory into per-user layout. + +Usage: + PYTHONPATH=. 
python scripts/migrate_user_isolation.py [--dry-run] + +The script is idempotent — re-running it after a successful migration is a no-op. +""" + +import argparse +import logging +import shutil + +from deerflow.config.paths import Paths, get_paths + +logger = logging.getLogger(__name__) + + +def migrate_thread_dirs( + paths: Paths, + thread_owner_map: dict[str, str], + *, + dry_run: bool = False, +) -> list[dict]: + """Move legacy thread directories into per-user layout. + + Args: + paths: Paths instance. + thread_owner_map: Mapping of thread_id -> user_id from threads_meta table. + dry_run: If True, only log what would happen. + + Returns: + List of migration report entries. + """ + report: list[dict] = [] + legacy_threads = paths.base_dir / "threads" + if not legacy_threads.exists(): + logger.info("No legacy threads directory found — nothing to migrate.") + return report + + for thread_dir in sorted(legacy_threads.iterdir()): + if not thread_dir.is_dir(): + continue + thread_id = thread_dir.name + user_id = thread_owner_map.get(thread_id, "default") + dest = paths.base_dir / "users" / user_id / "threads" / thread_id + + entry = {"thread_id": thread_id, "user_id": user_id, "action": ""} + + if dest.exists(): + conflicts_dir = paths.base_dir / "migration-conflicts" / thread_id + entry["action"] = f"conflict -> {conflicts_dir}" + if not dry_run: + conflicts_dir.parent.mkdir(parents=True, exist_ok=True) + shutil.move(str(thread_dir), str(conflicts_dir)) + logger.warning("Conflict for thread %s: moved to %s", thread_id, conflicts_dir) + else: + entry["action"] = f"moved -> {dest}" + if not dry_run: + dest.parent.mkdir(parents=True, exist_ok=True) + shutil.move(str(thread_dir), str(dest)) + logger.info("Migrated thread %s -> user %s", thread_id, user_id) + + report.append(entry) + + # Clean up empty legacy threads dir + if not dry_run and legacy_threads.exists() and not any(legacy_threads.iterdir()): + legacy_threads.rmdir() + + return report + + +def migrate_memory( + paths: Paths, + user_id: str = "default", + *, + dry_run: bool = False, +) -> None: + """Move legacy global memory.json into per-user layout. + + Args: + paths: Paths instance. + user_id: Target user to receive the legacy memory. + dry_run: If True, only log. + """ + legacy_mem = paths.base_dir / "memory.json" + if not legacy_mem.exists(): + logger.info("No legacy memory.json found — nothing to migrate.") + return + + dest = paths.user_memory_file(user_id) + if dest.exists(): + legacy_backup = paths.base_dir / "memory.legacy.json" + logger.warning("Destination %s exists; renaming legacy to %s", dest, legacy_backup) + if not dry_run: + legacy_mem.rename(legacy_backup) + return + + logger.info("Migrating memory.json -> %s", dest) + if not dry_run: + dest.parent.mkdir(parents=True, exist_ok=True) + shutil.move(str(legacy_mem), str(dest)) + + +def _build_owner_map_from_db(paths: Paths) -> dict[str, str]: + """Query threads_meta table for thread_id -> user_id mapping. + + Uses raw sqlite3 to avoid async dependencies. 
+ """ + import sqlite3 + + db_path = paths.base_dir / "deer-flow.db" + if not db_path.exists(): + logger.info("No database found at %s — using empty owner map.", db_path) + return {} + + conn = sqlite3.connect(str(db_path)) + try: + cursor = conn.execute("SELECT thread_id, user_id FROM threads_meta WHERE user_id IS NOT NULL") + return {row[0]: row[1] for row in cursor.fetchall()} + except sqlite3.OperationalError as e: + logger.warning("Failed to query threads_meta: %s", e) + return {} + finally: + conn.close() + + +def main() -> None: + parser = argparse.ArgumentParser(description="Migrate DeerFlow data to per-user layout") + parser.add_argument("--dry-run", action="store_true", help="Log actions without making changes") + args = parser.parse_args() + + logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") + + paths = get_paths() + logger.info("Base directory: %s", paths.base_dir) + logger.info("Dry run: %s", args.dry_run) + + owner_map = _build_owner_map_from_db(paths) + logger.info("Found %d thread ownership records in DB", len(owner_map)) + + report = migrate_thread_dirs(paths, owner_map, dry_run=args.dry_run) + migrate_memory(paths, user_id="default", dry_run=args.dry_run) + + if report: + logger.info("Migration report:") + for entry in report: + logger.info(" thread=%s user=%s action=%s", entry["thread_id"], entry["user_id"], entry["action"]) + else: + logger.info("No threads to migrate.") + + unowned = [e for e in report if e["user_id"] == "default"] + if unowned: + logger.warning("%d thread(s) had no owner and were assigned to 'default':", len(unowned)) + for e in unowned: + logger.warning(" %s", e["thread_id"]) + + +if __name__ == "__main__": + main() diff --git a/backend/tests/_router_auth_helpers.py b/backend/tests/_router_auth_helpers.py new file mode 100644 index 000000000..2bd2ebdee --- /dev/null +++ b/backend/tests/_router_auth_helpers.py @@ -0,0 +1,129 @@ +"""Helpers for router-level tests that need a stubbed auth context. + +The production gateway runs ``AuthMiddleware`` (validates the JWT cookie) +ahead of every router, plus ``@require_permission(owner_check=True)`` +decorators that read ``request.state.auth`` and call +``thread_store.check_access``. Router-level unit tests construct +**bare** FastAPI apps that include only one router — they have neither +the auth middleware nor a real thread_store, so the decorators raise +401 (TestClient path) or ValueError (direct-call path). + +This module provides two surfaces: + +1. :func:`make_authed_test_app` — wraps ``FastAPI()`` with a tiny + ``BaseHTTPMiddleware`` that stamps a fake user / AuthContext on every + request, plus a permissive ``thread_store`` mock on + ``app.state``. Use from TestClient-based router tests. + +2. :func:`call_unwrapped` — invokes the underlying function bypassing + the ``@require_permission`` decorator chain by walking ``__wrapped__``. + Use from direct-call tests that previously imported the route + function and called it positionally. + +Both helpers are deliberately permissive: they never deny a request. +Tests that want to verify the *auth boundary itself* (e.g. +``test_auth_middleware``, ``test_auth_type_system``) build their own +apps with the real middleware — those should not use this module. 
+
+"""
+
+from __future__ import annotations
+
+from collections.abc import Callable
+from unittest.mock import AsyncMock, MagicMock
+from uuid import uuid4
+
+from fastapi import FastAPI, Request, Response
+from starlette.middleware.base import BaseHTTPMiddleware
+from starlette.types import ASGIApp
+
+from app.gateway.auth.models import User
+from app.gateway.authz import AuthContext, Permissions
+
+# Default permission set granted to the stub user. Mirrors `_ALL_PERMISSIONS`
+# in authz.py — kept inline so the tests don't import a private symbol.
+_STUB_PERMISSIONS: list[str] = [
+    Permissions.THREADS_READ,
+    Permissions.THREADS_WRITE,
+    Permissions.THREADS_DELETE,
+    Permissions.RUNS_CREATE,
+    Permissions.RUNS_READ,
+    Permissions.RUNS_CANCEL,
+]
+
+
+def _make_stub_user() -> User:
+    """A stub test user: same shape as production, fresh UUID per call."""
+    return User(
+        email="router-test@example.com",
+        password_hash="x",
+        system_role="user",
+        id=uuid4(),
+    )
+
+
+class _StubAuthMiddleware(BaseHTTPMiddleware):
+    """Stamp a fake user / AuthContext onto every request.
+
+    Mirrors what production ``AuthMiddleware`` does after the JWT decode
+    + DB lookup short-circuit, so ``@require_permission`` finds an
+    authenticated context and skips its own re-authentication path.
+    """
+
+    def __init__(self, app: ASGIApp, user_factory: Callable[[], User]) -> None:
+        super().__init__(app)
+        self._user_factory = user_factory
+
+    async def dispatch(self, request: Request, call_next: Callable) -> Response:
+        user = self._user_factory()
+        request.state.user = user
+        request.state.auth = AuthContext(user=user, permissions=list(_STUB_PERMISSIONS))
+        return await call_next(request)
+
+
+def make_authed_test_app(
+    *,
+    user_factory: Callable[[], User] | None = None,
+    owner_check_passes: bool = True,
+) -> FastAPI:
+    """Build a FastAPI test app with stub auth + permissive thread_store.
+
+    Args:
+        user_factory: Override the default test user. Must return a fully
+            populated :class:`User`. Useful for cross-user isolation tests
+            that need a stable id across requests.
+        owner_check_passes: When True (default), ``thread_store.check_access``
+            returns True for every call so ``@require_permission(owner_check=True)``
+            never blocks the route under test. Pass False to verify that
+            permission failures surface correctly.
+
+    Returns:
+        A ``FastAPI`` app with the stub middleware installed and
+        ``app.state.thread_store`` set to a permissive mock. The
+        caller is still responsible for ``app.include_router(...)``.
+    """
+    factory = user_factory or _make_stub_user
+    app = FastAPI()
+    app.add_middleware(_StubAuthMiddleware, user_factory=factory)
+
+    repo = MagicMock()
+    repo.check_access = AsyncMock(return_value=owner_check_passes)
+    app.state.thread_store = repo
+
+    return app
+
+
+def call_unwrapped[**P, R](decorated: Callable[P, R], /, *args: P.args, **kwargs: P.kwargs) -> R:
+    """Invoke the underlying function of a ``@require_permission``-decorated route.
+
+    ``functools.wraps`` sets ``__wrapped__`` on each layer; we walk all
+    the way down to the original handler, bypassing every authz +
+    require_auth wrapper. Use from tests that need to call route
+    functions directly (without TestClient) and don't want to construct
+    a fake ``Request`` just to satisfy the decorator. The ``ParamSpec``
+    propagates the wrapped route's signature so call sites still get
+    parameter checking despite the unwrapping.
+ """ + fn: Callable = decorated + while hasattr(fn, "__wrapped__"): + fn = fn.__wrapped__ # type: ignore[attr-defined] + return fn(*args, **kwargs) diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py index 997f42577..a357a3962 100644 --- a/backend/tests/conftest.py +++ b/backend/tests/conftest.py @@ -7,6 +7,7 @@ issues when unit-testing lightweight config/registry code in isolation. import importlib.util import sys from pathlib import Path +from types import SimpleNamespace from unittest.mock import MagicMock import pytest @@ -53,3 +54,59 @@ def provisioner_module(): module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) return module + + +# --------------------------------------------------------------------------- +# Auto-set user context for every test unless marked no_auto_user +# --------------------------------------------------------------------------- +# +# Repository methods read ``user_id`` from a contextvar by default +# (see ``deerflow.runtime.user_context``). Without this fixture, every +# pre-existing persistence test would raise RuntimeError because the +# contextvar is unset. The fixture sets a default test user on every +# test; tests that explicitly want to verify behaviour *without* a user +# context should mark themselves ``@pytest.mark.no_auto_user``. + + +@pytest.fixture(autouse=True) +def _reset_skill_storage_singleton(): + """Reset the SkillStorage singleton between tests to prevent cross-test contamination.""" + try: + from deerflow.skills.storage import reset_skill_storage + except ImportError: + yield + return + reset_skill_storage() + try: + yield + finally: + reset_skill_storage() + + +@pytest.fixture(autouse=True) +def _auto_user_context(request): + """Inject a default ``test-user-autouse`` into the contextvar. + + Opt-out via ``@pytest.mark.no_auto_user``. Uses lazy import so that + tests which don't touch the persistence layer never pay the cost + of importing runtime.user_context. 
+ """ + if request.node.get_closest_marker("no_auto_user"): + yield + return + + try: + from deerflow.runtime.user_context import ( + reset_current_user, + set_current_user, + ) + except ImportError: + yield + return + + user = SimpleNamespace(id="test-user-autouse", email="test@local") + token = set_current_user(user) + try: + yield + finally: + reset_current_user(token) diff --git a/backend/tests/test_aio_sandbox_local_backend.py b/backend/tests/test_aio_sandbox_local_backend.py index d0b99bec1..333c3eb53 100644 --- a/backend/tests/test_aio_sandbox_local_backend.py +++ b/backend/tests/test_aio_sandbox_local_backend.py @@ -1,4 +1,14 @@ -from deerflow.community.aio_sandbox.local_backend import _format_container_mount +import logging +import os +from types import SimpleNamespace + +from deerflow.community.aio_sandbox.local_backend import ( + LocalContainerBackend, + _format_container_command_for_log, + _format_container_mount, + _redact_container_command_for_log, + _resolve_docker_bind_host, +) def test_format_container_mount_uses_mount_syntax_for_docker_windows_paths(): @@ -26,3 +36,201 @@ def test_format_container_mount_keeps_volume_syntax_for_apple_container(): "-v", "/host/path:/mnt/path:ro", ] + + +def test_redact_container_command_for_log_redacts_env_values(): + redacted = _redact_container_command_for_log( + [ + "docker", + "run", + "-e", + "API_KEY=secret-value", + "--env=TOKEN=token-value", + "--name", + "sandbox", + "image", + ] + ) + + assert "API_KEY=" in redacted + assert "--env=TOKEN=" in redacted + assert "secret-value" not in " ".join(redacted) + assert "token-value" not in " ".join(redacted) + + +def test_redact_container_command_for_log_keeps_inherited_env_names(): + redacted = _redact_container_command_for_log( + [ + "docker", + "run", + "-e", + "API_KEY", + "--env=TOKEN", + "--name", + "sandbox", + "image", + ] + ) + + assert redacted == [ + "docker", + "run", + "-e", + "API_KEY", + "--env=TOKEN", + "--name", + "sandbox", + "image", + ] + + +def test_format_container_command_for_log_uses_windows_quoting(monkeypatch): + monkeypatch.setattr(os, "name", "nt") + + command = _format_container_command_for_log(["docker", "run", "--name", "sandbox one", "image"]) + + assert command == 'docker run --name "sandbox one" image' + + +def test_start_container_logs_redacted_env_values(monkeypatch, caplog): + backend = LocalContainerBackend( + image="sandbox:latest", + base_port=8080, + container_prefix="sandbox", + config_mounts=[], + environment={"API_KEY": "secret-value", "NORMAL": "visible-value"}, + ) + monkeypatch.setattr(backend, "_runtime", "docker") + + captured_cmd: list[str] = [] + + def fake_run(cmd, **kwargs): + captured_cmd.extend(cmd) + return SimpleNamespace(stdout="container-id\n", stderr="", returncode=0) + + monkeypatch.setattr("subprocess.run", fake_run) + + with caplog.at_level(logging.INFO, logger="deerflow.community.aio_sandbox.local_backend"): + backend._start_container("sandbox-test", 18080) + + joined_cmd = " ".join(captured_cmd) + assert "API_KEY=secret-value" in joined_cmd + assert "NORMAL=visible-value" in joined_cmd + + log_output = "\n".join(record.getMessage() for record in caplog.records) + assert "API_KEY=" in log_output + assert "NORMAL=" in log_output + assert "secret-value" not in log_output + assert "visible-value" not in log_output + + +def _capture_start_container_command(monkeypatch, backend: LocalContainerBackend, runtime: str = "docker") -> list[str]: + monkeypatch.setattr(backend, "_runtime", runtime) + captured_cmd: list[str] = [] + + def 
fake_run(cmd, **kwargs): + captured_cmd.extend(cmd) + return SimpleNamespace(stdout="container-id\n", stderr="", returncode=0) + + monkeypatch.setattr("subprocess.run", fake_run) + backend._start_container("sandbox-test", 18080) + return captured_cmd + + +def test_resolve_docker_bind_host_defaults_loopback_for_localhost(monkeypatch): + monkeypatch.delenv("DEER_FLOW_SANDBOX_BIND_HOST", raising=False) + monkeypatch.delenv("DEER_FLOW_SANDBOX_HOST", raising=False) + + assert _resolve_docker_bind_host() == "127.0.0.1" + + +def test_resolve_docker_bind_host_keeps_dood_compatibility(monkeypatch): + monkeypatch.delenv("DEER_FLOW_SANDBOX_BIND_HOST", raising=False) + monkeypatch.setenv("DEER_FLOW_SANDBOX_HOST", "host.docker.internal") + + assert _resolve_docker_bind_host() == "0.0.0.0" + + +def test_resolve_docker_bind_host_uses_ipv6_loopback_for_ipv6_sandbox_host(monkeypatch): + monkeypatch.delenv("DEER_FLOW_SANDBOX_BIND_HOST", raising=False) + monkeypatch.setenv("DEER_FLOW_SANDBOX_HOST", "[::1]") + + assert _resolve_docker_bind_host() == "[::1]" + + +def test_resolve_docker_bind_host_logs_selected_bind_reason(caplog): + with caplog.at_level(logging.DEBUG, logger="deerflow.community.aio_sandbox.local_backend"): + assert _resolve_docker_bind_host(sandbox_host="localhost", bind_host="") == "127.0.0.1" + + messages = "\n".join(record.getMessage() for record in caplog.records) + assert "Docker sandbox bind: 127.0.0.1 (loopback default)" in messages + + +def test_resolve_docker_bind_host_allows_explicit_override(monkeypatch): + monkeypatch.setenv("DEER_FLOW_SANDBOX_HOST", "localhost") + monkeypatch.setenv("DEER_FLOW_SANDBOX_BIND_HOST", "192.0.2.10") + + assert _resolve_docker_bind_host() == "192.0.2.10" + + +def test_start_container_binds_local_docker_port_to_loopback_by_default(monkeypatch): + backend = LocalContainerBackend( + image="sandbox:latest", + base_port=8080, + container_prefix="sandbox", + config_mounts=[], + environment={}, + ) + monkeypatch.delenv("DEER_FLOW_SANDBOX_HOST", raising=False) + monkeypatch.delenv("DEER_FLOW_SANDBOX_BIND_HOST", raising=False) + + captured_cmd = _capture_start_container_command(monkeypatch, backend) + + assert captured_cmd[captured_cmd.index("-p") + 1] == "127.0.0.1:18080:8080" + + +def test_start_container_keeps_broad_bind_for_dood_sandbox_host(monkeypatch): + backend = LocalContainerBackend( + image="sandbox:latest", + base_port=8080, + container_prefix="sandbox", + config_mounts=[], + environment={}, + ) + monkeypatch.setenv("DEER_FLOW_SANDBOX_HOST", "host.docker.internal") + monkeypatch.delenv("DEER_FLOW_SANDBOX_BIND_HOST", raising=False) + + captured_cmd = _capture_start_container_command(monkeypatch, backend) + + assert captured_cmd[captured_cmd.index("-p") + 1] == "0.0.0.0:18080:8080" + + +def test_start_container_binds_ipv6_sandbox_host_to_ipv6_loopback(monkeypatch): + backend = LocalContainerBackend( + image="sandbox:latest", + base_port=8080, + container_prefix="sandbox", + config_mounts=[], + environment={}, + ) + monkeypatch.setenv("DEER_FLOW_SANDBOX_HOST", "[::1]") + monkeypatch.delenv("DEER_FLOW_SANDBOX_BIND_HOST", raising=False) + + captured_cmd = _capture_start_container_command(monkeypatch, backend) + + assert captured_cmd[captured_cmd.index("-p") + 1] == "[::1]:18080:8080" + + +def test_start_container_keeps_apple_container_port_format(monkeypatch): + backend = LocalContainerBackend( + image="sandbox:latest", + base_port=8080, + container_prefix="sandbox", + config_mounts=[], + environment={}, + ) + 
monkeypatch.setenv("DEER_FLOW_SANDBOX_BIND_HOST", "127.0.0.1") + + captured_cmd = _capture_start_container_command(monkeypatch, backend, runtime="container") + + assert captured_cmd[captured_cmd.index("-p") + 1] == "18080:8080" diff --git a/backend/tests/test_aio_sandbox_provider.py b/backend/tests/test_aio_sandbox_provider.py index e797cf7e3..c7984531f 100644 --- a/backend/tests/test_aio_sandbox_provider.py +++ b/backend/tests/test_aio_sandbox_provider.py @@ -57,6 +57,7 @@ def test_get_thread_mounts_includes_acp_workspace(tmp_path, monkeypatch): """_get_thread_mounts must include /mnt/acp-workspace (read-only) for docker sandbox.""" aio_mod = importlib.import_module("deerflow.community.aio_sandbox.aio_sandbox_provider") monkeypatch.setattr(aio_mod, "get_paths", lambda: Paths(base_dir=tmp_path)) + monkeypatch.setattr(aio_mod, "get_effective_user_id", lambda: None) mounts = aio_mod.AioSandboxProvider._get_thread_mounts("thread-3") @@ -95,6 +96,7 @@ def test_get_thread_mounts_preserves_windows_host_path_style(tmp_path, monkeypat aio_mod = importlib.import_module("deerflow.community.aio_sandbox.aio_sandbox_provider") monkeypatch.setenv("DEER_FLOW_HOST_BASE_DIR", r"C:\Users\demo\deer-flow\backend\.deer-flow") monkeypatch.setattr(aio_mod, "get_paths", lambda: Paths(base_dir=tmp_path)) + monkeypatch.setattr(aio_mod, "get_effective_user_id", lambda: None) mounts = aio_mod.AioSandboxProvider._get_thread_mounts("thread-10") diff --git a/backend/tests/test_app_config_reload.py b/backend/tests/test_app_config_reload.py index 9e865f142..31e571afe 100644 --- a/backend/tests/test_app_config_reload.py +++ b/backend/tests/test_app_config_reload.py @@ -7,7 +7,7 @@ from pathlib import Path import yaml from deerflow.config.agents_api_config import get_agents_api_config -from deerflow.config.app_config import get_app_config, reset_app_config +from deerflow.config.app_config import AppConfig, get_app_config, reset_app_config def _write_config(path: Path, *, model_name: str, supports_thinking: bool) -> None: @@ -57,6 +57,42 @@ def _write_extensions_config(path: Path) -> None: path.write_text(json.dumps({"mcpServers": {}, "skills": {}}), encoding="utf-8") +def test_app_config_defaults_missing_database_to_sqlite(tmp_path, monkeypatch): + config_path = tmp_path / "config.yaml" + extensions_path = tmp_path / "extensions_config.json" + _write_extensions_config(extensions_path) + _write_config(config_path, model_name="first-model", supports_thinking=False) + + monkeypatch.setenv("DEER_FLOW_EXTENSIONS_CONFIG_PATH", str(extensions_path)) + + config = AppConfig.from_file(str(config_path)) + + assert config.database.backend == "sqlite" + assert config.database.sqlite_dir == ".deer-flow/data" + + +def test_app_config_defaults_empty_database_to_sqlite(tmp_path, monkeypatch): + config_path = tmp_path / "config.yaml" + extensions_path = tmp_path / "extensions_config.json" + _write_extensions_config(extensions_path) + config_path.write_text( + yaml.safe_dump( + { + "database": {}, + "sandbox": {"use": "deerflow.sandbox.local:LocalSandboxProvider"}, + } + ), + encoding="utf-8", + ) + + monkeypatch.setenv("DEER_FLOW_EXTENSIONS_CONFIG_PATH", str(extensions_path)) + + config = AppConfig.from_file(str(config_path)) + + assert config.database.backend == "sqlite" + assert config.database.sqlite_dir == ".deer-flow/data" + + def test_get_app_config_reloads_when_file_changes(tmp_path, monkeypatch): config_path = tmp_path / "config.yaml" extensions_path = tmp_path / "extensions_config.json" diff --git 
a/backend/tests/test_artifacts_router.py b/backend/tests/test_artifacts_router.py index 9a30ff44e..df32e45dc 100644 --- a/backend/tests/test_artifacts_router.py +++ b/backend/tests/test_artifacts_router.py @@ -3,7 +3,7 @@ import zipfile from pathlib import Path import pytest -from fastapi import FastAPI +from _router_auth_helpers import call_unwrapped, make_authed_test_app from fastapi.testclient import TestClient from starlette.requests import Request from starlette.responses import FileResponse @@ -36,7 +36,7 @@ def test_get_artifact_reads_utf8_text_file_on_windows_locale(tmp_path, monkeypat monkeypatch.setattr(artifacts_router, "resolve_thread_virtual_path", lambda _thread_id, _path: artifact_path) request = _make_request() - response = asyncio.run(artifacts_router.get_artifact("thread-1", "mnt/user-data/outputs/note.txt", request)) + response = asyncio.run(call_unwrapped(artifacts_router.get_artifact, "thread-1", "mnt/user-data/outputs/note.txt", request)) assert bytes(response.body).decode("utf-8") == text assert response.media_type == "text/plain" @@ -49,7 +49,7 @@ def test_get_artifact_forces_download_for_active_content(tmp_path, monkeypatch, monkeypatch.setattr(artifacts_router, "resolve_thread_virtual_path", lambda _thread_id, _path: artifact_path) - response = asyncio.run(artifacts_router.get_artifact("thread-1", f"mnt/user-data/outputs/{filename}", _make_request())) + response = asyncio.run(call_unwrapped(artifacts_router.get_artifact, "thread-1", f"mnt/user-data/outputs/{filename}", _make_request())) assert isinstance(response, FileResponse) assert response.headers.get("content-disposition", "").startswith("attachment;") @@ -63,7 +63,7 @@ def test_get_artifact_forces_download_for_active_content_in_skill_archive(tmp_pa monkeypatch.setattr(artifacts_router, "resolve_thread_virtual_path", lambda _thread_id, _path: skill_path) - response = asyncio.run(artifacts_router.get_artifact("thread-1", f"mnt/user-data/outputs/sample.skill/{filename}", _make_request())) + response = asyncio.run(call_unwrapped(artifacts_router.get_artifact, "thread-1", f"mnt/user-data/outputs/sample.skill/{filename}", _make_request())) assert response.headers.get("content-disposition", "").startswith("attachment;") assert bytes(response.body) == content.encode("utf-8") @@ -75,7 +75,7 @@ def test_get_artifact_download_false_does_not_force_attachment(tmp_path, monkeyp monkeypatch.setattr(artifacts_router, "resolve_thread_virtual_path", lambda _thread_id, _path: artifact_path) - app = FastAPI() + app = make_authed_test_app() app.include_router(artifacts_router.router) with TestClient(app) as client: @@ -93,7 +93,7 @@ def test_get_artifact_download_true_forces_attachment_for_skill_archive(tmp_path monkeypatch.setattr(artifacts_router, "resolve_thread_virtual_path", lambda _thread_id, _path: skill_path) - app = FastAPI() + app = make_authed_test_app() app.include_router(artifacts_router.router) with TestClient(app) as client: diff --git a/backend/tests/test_auth.py b/backend/tests/test_auth.py new file mode 100644 index 000000000..f19c83c7d --- /dev/null +++ b/backend/tests/test_auth.py @@ -0,0 +1,751 @@ +"""Tests for authentication module: JWT, password hashing, AuthContext, and authz decorators.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, MagicMock, patch +from uuid import uuid4 + +import bcrypt +import pytest +from fastapi import FastAPI, HTTPException +from fastapi.testclient import TestClient + +from app.gateway.auth import create_access_token, decode_token, hash_password, 
verify_password +from app.gateway.auth.models import User +from app.gateway.auth.password import needs_rehash +from app.gateway.authz import ( + AuthContext, + Permissions, + get_auth_context, + require_auth, + require_permission, +) + +# ── Password Hashing ──────────────────────────────────────────────────────── + + +def test_hash_password_and_verify(): + """Hashing and verification round-trip.""" + password = "s3cr3tP@ssw0rd!" + hashed = hash_password(password) + assert hashed != password + assert hashed.startswith("$dfv2$") + assert verify_password(password, hashed) is True + assert verify_password("wrongpassword", hashed) is False + + +def test_hash_password_different_each_time(): + """bcrypt generates unique salts, so same password has different hashes.""" + password = "testpassword" + h1 = hash_password(password) + h2 = hash_password(password) + assert h1 != h2 # Different salts + # But both verify correctly + assert verify_password(password, h1) is True + assert verify_password(password, h2) is True + + +def test_verify_password_rejects_empty(): + """Empty password should not verify.""" + hashed = hash_password("nonempty") + assert verify_password("", hashed) is False + + +def test_hash_produces_v2_prefix(): + """hash_password output starts with $dfv2$.""" + hashed = hash_password("anypassword123") + assert hashed.startswith("$dfv2$") + + +def test_verify_v1_prefixed_hash(): + """verify_password handles $dfv1$ prefixed hashes (plain bcrypt).""" + password = "legacyP@ssw0rd" + raw_bcrypt = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8") + v1_hash = f"$dfv1${raw_bcrypt}" + assert verify_password(password, v1_hash) is True + assert verify_password("wrong", v1_hash) is False + + +def test_verify_bare_bcrypt_hash(): + """verify_password handles bare bcrypt hashes (no prefix) as v1.""" + password = "oldstyleP@ss" + raw_bcrypt = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8") + assert verify_password(password, raw_bcrypt) is True + assert verify_password("wrong", raw_bcrypt) is False + + +def test_needs_rehash_returns_false_for_v2(): + """v2 hashes do not need rehashing.""" + hashed = hash_password("something") + assert needs_rehash(hashed) is False + + +def test_needs_rehash_returns_true_for_v1(): + """v1-prefixed hashes need rehashing.""" + raw = bcrypt.hashpw(b"pw", bcrypt.gensalt()).decode("utf-8") + assert needs_rehash(f"$dfv1${raw}") is True + + +def test_needs_rehash_returns_true_for_bare_bcrypt(): + """Bare bcrypt hashes (no prefix) need rehashing.""" + raw = bcrypt.hashpw(b"pw", bcrypt.gensalt()).decode("utf-8") + assert needs_rehash(raw) is True + + +# ── JWT ───────────────────────────────────────────────────────────────────── + + +def test_create_and_decode_token(): + """JWT creation and decoding round-trip.""" + user_id = str(uuid4()) + # Set a valid JWT secret for this test + import os + + os.environ["AUTH_JWT_SECRET"] = "test-secret-key-for-jwt-testing-minimum-32-chars" + token = create_access_token(user_id) + assert isinstance(token, str) + + payload = decode_token(token) + assert payload is not None + assert payload.sub == user_id + + +def test_decode_token_expired(): + """Expired token returns TokenError.EXPIRED.""" + from app.gateway.auth.errors import TokenError + + user_id = str(uuid4()) + # Create token that expires immediately + token = create_access_token(user_id, expires_delta=timedelta(seconds=-1)) + payload = decode_token(token) + assert payload == TokenError.EXPIRED + + +def test_decode_token_invalid(): + 
"""Invalid token returns TokenError.""" + from app.gateway.auth.errors import TokenError + + assert isinstance(decode_token("not.a.valid.token"), TokenError) + assert isinstance(decode_token(""), TokenError) + assert isinstance(decode_token("completely-wrong"), TokenError) + + +def test_create_token_custom_expiry(): + """Custom expiry is respected.""" + user_id = str(uuid4()) + token = create_access_token(user_id, expires_delta=timedelta(hours=1)) + payload = decode_token(token) + assert payload is not None + assert payload.sub == user_id + + +# ── AuthContext ──────────────────────────────────────────────────────────── + + +def test_auth_context_unauthenticated(): + """AuthContext with no user.""" + ctx = AuthContext(user=None, permissions=[]) + assert ctx.is_authenticated is False + assert ctx.has_permission("threads", "read") is False + + +def test_auth_context_authenticated_no_perms(): + """AuthContext with user but no permissions.""" + user = User(id=uuid4(), email="test@example.com", password_hash="hash") + ctx = AuthContext(user=user, permissions=[]) + assert ctx.is_authenticated is True + assert ctx.has_permission("threads", "read") is False + + +def test_auth_context_has_permission(): + """AuthContext permission checking.""" + user = User(id=uuid4(), email="test@example.com", password_hash="hash") + perms = [Permissions.THREADS_READ, Permissions.THREADS_WRITE] + ctx = AuthContext(user=user, permissions=perms) + assert ctx.has_permission("threads", "read") is True + assert ctx.has_permission("threads", "write") is True + assert ctx.has_permission("threads", "delete") is False + assert ctx.has_permission("runs", "read") is False + + +def test_auth_context_require_user_raises(): + """require_user raises 401 when not authenticated.""" + ctx = AuthContext(user=None, permissions=[]) + with pytest.raises(HTTPException) as exc_info: + ctx.require_user() + assert exc_info.value.status_code == 401 + + +def test_auth_context_require_user_returns_user(): + """require_user returns user when authenticated.""" + user = User(id=uuid4(), email="test@example.com", password_hash="hash") + ctx = AuthContext(user=user, permissions=[]) + returned = ctx.require_user() + assert returned == user + + +# ── get_auth_context helper ───────────────────────────────────────────────── + + +def test_get_auth_context_not_set(): + """get_auth_context returns None when auth not set on request.""" + mock_request = MagicMock() + # Make getattr return None (simulating attribute not set) + mock_request.state = MagicMock() + del mock_request.state.auth + assert get_auth_context(mock_request) is None + + +def test_get_auth_context_set(): + """get_auth_context returns the AuthContext from request.""" + user = User(id=uuid4(), email="test@example.com", password_hash="hash") + ctx = AuthContext(user=user, permissions=[Permissions.THREADS_READ]) + + mock_request = MagicMock() + mock_request.state.auth = ctx + + assert get_auth_context(mock_request) == ctx + + +# ── require_auth decorator ────────────────────────────────────────────────── + + +def test_require_auth_sets_auth_context(): + """require_auth rejects unauthenticated requests with 401.""" + from fastapi import Request + + app = FastAPI() + + @app.get("/test") + @require_auth + async def endpoint(request: Request): + ctx = get_auth_context(request) + return {"authenticated": ctx.is_authenticated} + + with TestClient(app) as client: + # No cookie → 401 (require_auth independently enforces authentication) + response = client.get("/test") + assert response.status_code == 
401 + + +def test_require_auth_requires_request_param(): + """require_auth raises ValueError if request parameter is missing.""" + import asyncio + + @require_auth + async def bad_endpoint(): # Missing `request` parameter + pass + + with pytest.raises(ValueError, match="require_auth decorator requires 'request' parameter"): + asyncio.run(bad_endpoint()) + + +# ── require_permission decorator ───────────────────────────────────────────── + + +def test_require_permission_requires_auth(): + """require_permission raises 401 when not authenticated.""" + from fastapi import Request + + app = FastAPI() + + @app.get("/test") + @require_permission("threads", "read") + async def endpoint(request: Request): + return {"ok": True} + + with TestClient(app) as client: + response = client.get("/test") + assert response.status_code == 401 + assert "Authentication required" in response.json()["detail"] + + +def test_require_permission_denies_wrong_permission(): + """User without required permission gets 403.""" + from fastapi import Request + + app = FastAPI() + user = User(id=uuid4(), email="test@example.com", password_hash="hash") + + @app.get("/test") + @require_permission("threads", "delete") + async def endpoint(request: Request): + return {"ok": True} + + mock_auth = AuthContext(user=user, permissions=[Permissions.THREADS_READ]) + + with patch("app.gateway.authz._authenticate", return_value=mock_auth): + with TestClient(app) as client: + response = client.get("/test") + assert response.status_code == 403 + assert "Permission denied" in response.json()["detail"] + + +# ── Weak JWT secret warning ────────────────────────────────────────────────── + + +# ── User Model Fields ────────────────────────────────────────────────────── + + +def test_user_model_has_needs_setup_default_false(): + """New users default to needs_setup=False.""" + user = User(email="test@example.com", password_hash="hash") + assert user.needs_setup is False + + +def test_user_model_has_token_version_default_zero(): + """New users default to token_version=0.""" + user = User(email="test@example.com", password_hash="hash") + assert user.token_version == 0 + + +def test_user_model_needs_setup_true(): + """Auto-created admin has needs_setup=True.""" + user = User(email="admin@example.com", password_hash="hash", needs_setup=True) + assert user.needs_setup is True + + +def test_sqlite_round_trip_new_fields(): + """needs_setup and token_version survive create → read round-trip. + + Uses the shared persistence engine (same one threads_meta, runs, + run_events, and feedback use). The old separate .deer-flow/users.db + file is gone. 
+ """ + import asyncio + import tempfile + + from app.gateway.auth.repositories.sqlite import SQLiteUserRepository + + async def _run() -> None: + from deerflow.persistence.engine import ( + close_engine, + get_session_factory, + init_engine, + ) + + with tempfile.TemporaryDirectory() as tmpdir: + url = f"sqlite+aiosqlite:///{tmpdir}/scratch.db" + await init_engine("sqlite", url=url, sqlite_dir=tmpdir) + try: + repo = SQLiteUserRepository(get_session_factory()) + user = User( + email="setup@test.com", + password_hash="fakehash", + system_role="admin", + needs_setup=True, + token_version=3, + ) + created = await repo.create_user(user) + assert created.needs_setup is True + assert created.token_version == 3 + + fetched = await repo.get_user_by_email("setup@test.com") + assert fetched is not None + assert fetched.needs_setup is True + assert fetched.token_version == 3 + + fetched.needs_setup = False + fetched.token_version = 4 + await repo.update_user(fetched) + refetched = await repo.get_user_by_id(str(fetched.id)) + assert refetched is not None + assert refetched.needs_setup is False + assert refetched.token_version == 4 + finally: + await close_engine() + + asyncio.run(_run()) + + +def test_update_user_raises_when_row_concurrently_deleted(tmp_path): + """Concurrent-delete during update_user must hard-fail, not silently no-op. + + Earlier the SQLite repo returned the input unchanged when the row was + missing, making a phantom success path that admin password reset + callers (`reset_admin`, `_ensure_admin_user`) would happily log as + 'password reset'. The new contract: raise ``UserNotFoundError`` so + a vanished row never looks like a successful update. + """ + import asyncio + import tempfile + + from app.gateway.auth.repositories.base import UserNotFoundError + from app.gateway.auth.repositories.sqlite import SQLiteUserRepository + + async def _run() -> None: + from deerflow.persistence.engine import ( + close_engine, + get_session_factory, + init_engine, + ) + from deerflow.persistence.user.model import UserRow + + with tempfile.TemporaryDirectory() as d: + url = f"sqlite+aiosqlite:///{d}/scratch.db" + await init_engine("sqlite", url=url, sqlite_dir=d) + try: + sf = get_session_factory() + repo = SQLiteUserRepository(sf) + user = User( + email="ghost@test.com", + password_hash="fakehash", + system_role="user", + ) + created = await repo.create_user(user) + + # Simulate "row vanished underneath us" by deleting the row + # via the raw ORM session, then attempt to update. 
+ async with sf() as session: + row = await session.get(UserRow, str(created.id)) + assert row is not None + await session.delete(row) + await session.commit() + + created.needs_setup = True + with pytest.raises(UserNotFoundError): + await repo.update_user(created) + finally: + await close_engine() + + asyncio.run(_run()) + + +# ── Token Versioning ─────────────────────────────────────────────────────── + + +def test_jwt_encodes_ver(): + """JWT payload includes ver field.""" + import os + + from app.gateway.auth.errors import TokenError + + os.environ["AUTH_JWT_SECRET"] = "test-secret-key-for-jwt-testing-minimum-32-chars" + token = create_access_token(str(uuid4()), token_version=3) + payload = decode_token(token) + assert not isinstance(payload, TokenError) + assert payload.ver == 3 + + +def test_jwt_default_ver_zero(): + """JWT ver defaults to 0.""" + import os + + from app.gateway.auth.errors import TokenError + + os.environ["AUTH_JWT_SECRET"] = "test-secret-key-for-jwt-testing-minimum-32-chars" + token = create_access_token(str(uuid4())) + payload = decode_token(token) + assert not isinstance(payload, TokenError) + assert payload.ver == 0 + + +def test_token_version_mismatch_rejects(): + """Token with stale ver is rejected by get_current_user_from_request.""" + import asyncio + import os + + os.environ["AUTH_JWT_SECRET"] = "test-secret-key-for-jwt-testing-minimum-32-chars" + + user_id = str(uuid4()) + token = create_access_token(user_id, token_version=0) + + mock_user = User(id=user_id, email="test@example.com", password_hash="hash", token_version=1) + + mock_request = MagicMock() + mock_request.cookies = {"access_token": token} + + with patch("app.gateway.deps.get_local_provider") as mock_provider_fn: + mock_provider = MagicMock() + mock_provider.get_user = AsyncMock(return_value=mock_user) + mock_provider_fn.return_value = mock_provider + + from app.gateway.deps import get_current_user_from_request + + with pytest.raises(HTTPException) as exc_info: + asyncio.run(get_current_user_from_request(mock_request)) + assert exc_info.value.status_code == 401 + assert "revoked" in str(exc_info.value.detail).lower() + + +# ── change-password extension ────────────────────────────────────────────── + + +def test_change_password_request_accepts_new_email(): + """ChangePasswordRequest model accepts optional new_email.""" + from app.gateway.routers.auth import ChangePasswordRequest + + req = ChangePasswordRequest( + current_password="old", + new_password="newpassword", + new_email="new@example.com", + ) + assert req.new_email == "new@example.com" + + +def test_change_password_request_new_email_optional(): + """ChangePasswordRequest model works without new_email.""" + from app.gateway.routers.auth import ChangePasswordRequest + + req = ChangePasswordRequest(current_password="old", new_password="newpassword") + assert req.new_email is None + + +def test_login_response_includes_needs_setup(): + """LoginResponse includes needs_setup field.""" + from app.gateway.routers.auth import LoginResponse + + resp = LoginResponse(expires_in=3600, needs_setup=True) + assert resp.needs_setup is True + resp2 = LoginResponse(expires_in=3600) + assert resp2.needs_setup is False + + +# ── Rate Limiting ────────────────────────────────────────────────────────── + + +def test_rate_limiter_allows_under_limit(): + """Requests under the limit are allowed.""" + from app.gateway.routers.auth import _check_rate_limit, _login_attempts + + _login_attempts.clear() + _check_rate_limit("192.168.1.1") # Should not raise + + +def 
test_rate_limiter_blocks_after_max_failures(): + """IP is blocked after 5 consecutive failures.""" + from app.gateway.routers.auth import _check_rate_limit, _login_attempts, _record_login_failure + + _login_attempts.clear() + ip = "10.0.0.1" + for _ in range(5): + _record_login_failure(ip) + with pytest.raises(HTTPException) as exc_info: + _check_rate_limit(ip) + assert exc_info.value.status_code == 429 + + +def test_rate_limiter_resets_on_success(): + """Successful login clears the failure counter.""" + from app.gateway.routers.auth import _check_rate_limit, _login_attempts, _record_login_failure, _record_login_success + + _login_attempts.clear() + ip = "10.0.0.2" + for _ in range(4): + _record_login_failure(ip) + _record_login_success(ip) + _check_rate_limit(ip) # Should not raise + + +# ── Client IP extraction ───────────────────────────────────────────────── + + +def test_get_client_ip_direct_connection_no_proxy(monkeypatch): + """Direct mode (no AUTH_TRUSTED_PROXIES): use TCP peer regardless of X-Real-IP.""" + monkeypatch.delenv("AUTH_TRUSTED_PROXIES", raising=False) + from app.gateway.routers.auth import _get_client_ip + + req = MagicMock() + req.client.host = "203.0.113.42" + req.headers = {} + assert _get_client_ip(req) == "203.0.113.42" + + +def test_get_client_ip_x_real_ip_ignored_when_no_trusted_proxy(monkeypatch): + """X-Real-IP is silently ignored if AUTH_TRUSTED_PROXIES is unset. + + This closes the bypass where any client could rotate X-Real-IP per + request to dodge per-IP rate limits in dev / direct mode. + """ + monkeypatch.delenv("AUTH_TRUSTED_PROXIES", raising=False) + from app.gateway.routers.auth import _get_client_ip + + req = MagicMock() + req.client.host = "127.0.0.1" + req.headers = {"x-real-ip": "203.0.113.42"} + assert _get_client_ip(req) == "127.0.0.1" + + +def test_get_client_ip_x_real_ip_honored_from_trusted_proxy(monkeypatch): + """X-Real-IP is honored when the TCP peer matches AUTH_TRUSTED_PROXIES.""" + monkeypatch.setenv("AUTH_TRUSTED_PROXIES", "10.0.0.0/8") + from app.gateway.routers.auth import _get_client_ip + + req = MagicMock() + req.client.host = "10.5.6.7" # in trusted CIDR + req.headers = {"x-real-ip": "203.0.113.42"} + assert _get_client_ip(req) == "203.0.113.42" + + +def test_get_client_ip_x_real_ip_rejected_from_untrusted_peer(monkeypatch): + """X-Real-IP is rejected when the TCP peer is NOT in the trusted list.""" + monkeypatch.setenv("AUTH_TRUSTED_PROXIES", "10.0.0.0/8") + from app.gateway.routers.auth import _get_client_ip + + req = MagicMock() + req.client.host = "8.8.8.8" # NOT in trusted CIDR + req.headers = {"x-real-ip": "203.0.113.42"} # client trying to spoof + assert _get_client_ip(req) == "8.8.8.8" + + +def test_get_client_ip_xff_never_honored(monkeypatch): + """X-Forwarded-For is never used; only X-Real-IP from a trusted peer.""" + monkeypatch.setenv("AUTH_TRUSTED_PROXIES", "10.0.0.0/8") + from app.gateway.routers.auth import _get_client_ip + + req = MagicMock() + req.client.host = "10.0.0.1" + req.headers = {"x-forwarded-for": "198.51.100.5"} # no x-real-ip + assert _get_client_ip(req) == "10.0.0.1" + + +def test_get_client_ip_invalid_trusted_proxy_entry_skipped(monkeypatch, caplog): + """Garbage entries in AUTH_TRUSTED_PROXIES are warned and skipped.""" + monkeypatch.setenv("AUTH_TRUSTED_PROXIES", "not-an-ip,10.0.0.0/8") + from app.gateway.routers.auth import _get_client_ip + + req = MagicMock() + req.client.host = "10.5.6.7" + req.headers = {"x-real-ip": "203.0.113.42"} + assert _get_client_ip(req) == "203.0.113.42" # valid entry 
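still works
+
+
+def _client_ip_reference_sketch(req) -> str:
+    """Illustrative sketch only: the trust model assumed by these tests.
+
+    The real ``_get_client_ip`` lives in ``app.gateway.routers.auth``; the
+    point illustrated here is that X-Real-IP is honoured only when the TCP
+    peer is inside AUTH_TRUSTED_PROXIES, and X-Forwarded-For never is.
+    """
+    import ipaddress
+    import os
+
+    if req.client is None:
+        return "unknown"
+    peer = req.client.host
+    trusted = []
+    for entry in os.environ.get("AUTH_TRUSTED_PROXIES", "").split(","):
+        entry = entry.strip()
+        if not entry:
+            continue
+        try:
+            trusted.append(ipaddress.ip_network(entry, strict=False))
+        except ValueError:
+            continue  # garbage entries are skipped (the real code also warns)
+    real_ip = req.headers.get("x-real-ip")
+    if real_ip and any(ipaddress.ip_address(peer) in net for net in trusted):
+        return real_ip
+    return peer
+
+
+# Even when request.client is missing entirely, the "unknown" fallback below 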
still works + + +def test_get_client_ip_no_client_returns_unknown(monkeypatch): + """No request.client → 'unknown' marker (no crash).""" + monkeypatch.delenv("AUTH_TRUSTED_PROXIES", raising=False) + from app.gateway.routers.auth import _get_client_ip + + req = MagicMock() + req.client = None + req.headers = {} + assert _get_client_ip(req) == "unknown" + + +# ── Common-password blocklist ──────────────────────────────────────────────── + + +def test_register_rejects_literal_password(): + """Pydantic validator rejects 'password' as a registration password.""" + from pydantic import ValidationError + + from app.gateway.routers.auth import RegisterRequest + + with pytest.raises(ValidationError) as exc: + RegisterRequest(email="x@example.com", password="password") + assert "too common" in str(exc.value) + + +def test_register_rejects_common_password_case_insensitive(): + """Case variants of common passwords are also rejected.""" + from pydantic import ValidationError + + from app.gateway.routers.auth import RegisterRequest + + for variant in ["PASSWORD", "Password1", "qwerty123", "letmein1"]: + with pytest.raises(ValidationError): + RegisterRequest(email="x@example.com", password=variant) + + +def test_register_accepts_strong_password(): + """A non-blocklisted password of length >=8 is accepted.""" + from app.gateway.routers.auth import RegisterRequest + + req = RegisterRequest(email="x@example.com", password="Tr0ub4dor&3-Horse") + assert req.password == "Tr0ub4dor&3-Horse" + + +def test_change_password_rejects_common_password(): + """The same blocklist applies to change-password.""" + from pydantic import ValidationError + + from app.gateway.routers.auth import ChangePasswordRequest + + with pytest.raises(ValidationError): + ChangePasswordRequest(current_password="anything", new_password="iloveyou") + + +def test_password_blocklist_keeps_short_passwords_for_length_check(): + """Short passwords still fail the min_length check (not the blocklist).""" + from pydantic import ValidationError + + from app.gateway.routers.auth import RegisterRequest + + with pytest.raises(ValidationError) as exc: + RegisterRequest(email="x@example.com", password="abc") + # the length check should fire, not the blocklist + assert "at least 8 characters" in str(exc.value) + + +# ── Weak JWT secret warning ────────────────────────────────────────────────── + + +def test_missing_jwt_secret_generates_ephemeral(monkeypatch, caplog): + """get_auth_config() auto-generates an ephemeral secret when AUTH_JWT_SECRET is unset.""" + import logging + + import app.gateway.auth.config as config_module + + config_module._auth_config = None + monkeypatch.delenv("AUTH_JWT_SECRET", raising=False) + + with caplog.at_level(logging.WARNING): + config = config_module.get_auth_config() + + assert config.jwt_secret # non-empty ephemeral secret + assert any("AUTH_JWT_SECRET" in msg for msg in caplog.messages) + + # Cleanup + config_module._auth_config = None + + +# ── Auto-rehash on login ────────────────────────────────────────────────── + + +def test_authenticate_auto_rehashes_legacy_hash(): + """authenticate() upgrades a bare bcrypt hash to v2 on successful login.""" + import asyncio + + from app.gateway.auth.local_provider import LocalAuthProvider + + password = "rehashTest123" + + user = User( + id=uuid4(), + email="rehash@test.com", + password_hash=bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()).decode("utf-8"), + ) + + mock_repo = MagicMock() + mock_repo.get_user_by_email = AsyncMock(return_value=user) + mock_repo.update_user = 
AsyncMock(return_value=user) + + provider = LocalAuthProvider(mock_repo) + + result = asyncio.run(provider.authenticate({"email": "rehash@test.com", "password": password})) + assert result is not None + assert result.password_hash.startswith("$dfv2$") + mock_repo.update_user.assert_called_once() + + +def test_authenticate_skips_rehash_for_v2_hash(): + """authenticate() does NOT rehash when the stored hash is already v2.""" + import asyncio + + from app.gateway.auth.local_provider import LocalAuthProvider + + password = "alreadyv2Pass!" + + user = User( + id=uuid4(), + email="v2@test.com", + password_hash=hash_password(password), + ) + + mock_repo = MagicMock() + mock_repo.get_user_by_email = AsyncMock(return_value=user) + mock_repo.update_user = AsyncMock(return_value=user) + + provider = LocalAuthProvider(mock_repo) + + result = asyncio.run(provider.authenticate({"email": "v2@test.com", "password": password})) + assert result is not None + mock_repo.update_user.assert_not_called() diff --git a/backend/tests/test_auth_config.py b/backend/tests/test_auth_config.py new file mode 100644 index 000000000..21b8bd81b --- /dev/null +++ b/backend/tests/test_auth_config.py @@ -0,0 +1,54 @@ +"""Tests for AuthConfig typed configuration.""" + +import os +from unittest.mock import patch + +import pytest + +from app.gateway.auth.config import AuthConfig + + +def test_auth_config_defaults(): + config = AuthConfig(jwt_secret="test-secret-key-123") + assert config.token_expiry_days == 7 + + +def test_auth_config_token_expiry_range(): + AuthConfig(jwt_secret="s", token_expiry_days=1) + AuthConfig(jwt_secret="s", token_expiry_days=30) + with pytest.raises(Exception): + AuthConfig(jwt_secret="s", token_expiry_days=0) + with pytest.raises(Exception): + AuthConfig(jwt_secret="s", token_expiry_days=31) + + +def test_auth_config_from_env(): + env = {"AUTH_JWT_SECRET": "test-jwt-secret-from-env"} + with patch.dict(os.environ, env, clear=False): + import app.gateway.auth.config as cfg + + old = cfg._auth_config + cfg._auth_config = None + try: + config = cfg.get_auth_config() + assert config.jwt_secret == "test-jwt-secret-from-env" + finally: + cfg._auth_config = old + + +def test_auth_config_missing_secret_generates_ephemeral(caplog): + import logging + + import app.gateway.auth.config as cfg + + old = cfg._auth_config + cfg._auth_config = None + try: + with patch.dict(os.environ, {}, clear=True): + os.environ.pop("AUTH_JWT_SECRET", None) + with caplog.at_level(logging.WARNING): + config = cfg.get_auth_config() + assert config.jwt_secret + assert any("AUTH_JWT_SECRET" in msg for msg in caplog.messages) + finally: + cfg._auth_config = old diff --git a/backend/tests/test_auth_errors.py b/backend/tests/test_auth_errors.py new file mode 100644 index 000000000..b3b46c75f --- /dev/null +++ b/backend/tests/test_auth_errors.py @@ -0,0 +1,75 @@ +"""Tests for auth error types and typed decode_token.""" + +from datetime import UTC, datetime, timedelta + +import jwt as pyjwt + +from app.gateway.auth.config import AuthConfig, set_auth_config +from app.gateway.auth.errors import AuthErrorCode, AuthErrorResponse, TokenError +from app.gateway.auth.jwt import create_access_token, decode_token + + +def test_auth_error_code_values(): + assert AuthErrorCode.INVALID_CREDENTIALS == "invalid_credentials" + assert AuthErrorCode.TOKEN_EXPIRED == "token_expired" + assert AuthErrorCode.NOT_AUTHENTICATED == "not_authenticated" + + +def test_token_error_values(): + assert TokenError.EXPIRED == "expired" + assert TokenError.INVALID_SIGNATURE == 
"invalid_signature" + assert TokenError.MALFORMED == "malformed" + + +def test_auth_error_response_serialization(): + err = AuthErrorResponse( + code=AuthErrorCode.TOKEN_EXPIRED, + message="Token has expired", + ) + d = err.model_dump() + assert d == {"code": "token_expired", "message": "Token has expired"} + + +def test_auth_error_response_from_dict(): + d = {"code": "invalid_credentials", "message": "Wrong password"} + err = AuthErrorResponse(**d) + assert err.code == AuthErrorCode.INVALID_CREDENTIALS + + +# ── decode_token typed failure tests ────────────────────────────── + +_TEST_SECRET = "test-secret-for-jwt-decode-token-tests" + + +def _setup_config(): + set_auth_config(AuthConfig(jwt_secret=_TEST_SECRET)) + + +def test_decode_token_returns_token_error_on_expired(): + _setup_config() + expired_payload = {"sub": "user-1", "exp": datetime.now(UTC) - timedelta(hours=1), "iat": datetime.now(UTC)} + token = pyjwt.encode(expired_payload, _TEST_SECRET, algorithm="HS256") + result = decode_token(token) + assert result == TokenError.EXPIRED + + +def test_decode_token_returns_token_error_on_bad_signature(): + _setup_config() + payload = {"sub": "user-1", "exp": datetime.now(UTC) + timedelta(hours=1), "iat": datetime.now(UTC)} + token = pyjwt.encode(payload, "wrong-secret", algorithm="HS256") + result = decode_token(token) + assert result == TokenError.INVALID_SIGNATURE + + +def test_decode_token_returns_token_error_on_malformed(): + _setup_config() + result = decode_token("not-a-jwt") + assert result == TokenError.MALFORMED + + +def test_decode_token_returns_payload_on_valid(): + _setup_config() + token = create_access_token("user-123") + result = decode_token(token) + assert not isinstance(result, TokenError) + assert result.sub == "user-123" diff --git a/backend/tests/test_auth_middleware.py b/backend/tests/test_auth_middleware.py new file mode 100644 index 000000000..726786ac9 --- /dev/null +++ b/backend/tests/test_auth_middleware.py @@ -0,0 +1,236 @@ +"""Tests for the global AuthMiddleware (fail-closed safety net).""" + +import pytest +from starlette.testclient import TestClient + +from app.gateway.auth_middleware import AuthMiddleware, _is_public + +# ── _is_public unit tests ───────────────────────────────────────────────── + + +@pytest.mark.parametrize( + "path", + [ + "/health", + "/health/", + "/docs", + "/docs/", + "/redoc", + "/openapi.json", + "/api/v1/auth/login/local", + "/api/v1/auth/register", + "/api/v1/auth/logout", + "/api/v1/auth/setup-status", + ], +) +def test_public_paths(path: str): + assert _is_public(path) is True + + +@pytest.mark.parametrize( + "path", + [ + "/api/models", + "/api/mcp/config", + "/api/memory", + "/api/skills", + "/api/threads/123", + "/api/threads/123/uploads", + "/api/agents", + "/api/channels", + "/api/runs/stream", + "/api/threads/123/runs", + "/api/v1/auth/me", + "/api/v1/auth/change-password", + ], +) +def test_protected_paths(path: str): + assert _is_public(path) is False + + +# ── Trailing slash / normalization edge cases ───────────────────────────── + + +@pytest.mark.parametrize( + "path", + [ + "/api/v1/auth/login/local/", + "/api/v1/auth/register/", + "/api/v1/auth/logout/", + "/api/v1/auth/setup-status/", + ], +) +def test_public_auth_paths_with_trailing_slash(path: str): + assert _is_public(path) is True + + +@pytest.mark.parametrize( + "path", + [ + "/api/models/", + "/api/v1/auth/me/", + "/api/v1/auth/change-password/", + ], +) +def test_protected_paths_with_trailing_slash(path: str): + assert _is_public(path) is False + + +def 
test_unknown_api_path_is_protected(): + """Fail-closed: any new /api/* path is protected by default.""" + assert _is_public("/api/new-feature") is False + assert _is_public("/api/v2/something") is False + assert _is_public("/api/v1/auth/new-endpoint") is False + + +# ── Middleware integration tests ────────────────────────────────────────── + + +def _make_app(): + """Create a minimal FastAPI app with AuthMiddleware for testing.""" + from fastapi import FastAPI + + app = FastAPI() + app.add_middleware(AuthMiddleware) + + @app.get("/health") + async def health(): + return {"status": "ok"} + + @app.get("/api/v1/auth/me") + async def auth_me(): + return {"id": "1", "email": "test@test.com"} + + @app.get("/api/v1/auth/setup-status") + async def setup_status(): + return {"needs_setup": False} + + @app.get("/api/models") + async def models_get(): + return {"models": []} + + @app.put("/api/mcp/config") + async def mcp_put(): + return {"ok": True} + + @app.delete("/api/threads/abc") + async def thread_delete(): + return {"ok": True} + + @app.patch("/api/threads/abc") + async def thread_patch(): + return {"ok": True} + + @app.post("/api/threads/abc/runs/stream") + async def stream(): + return {"ok": True} + + @app.get("/api/future-endpoint") + async def future(): + return {"ok": True} + + return app + + +@pytest.fixture +def client(): + return TestClient(_make_app()) + + +def test_public_path_no_cookie(client): + res = client.get("/health") + assert res.status_code == 200 + + +def test_public_auth_path_no_cookie(client): + """Public auth endpoints (login/register) pass without cookie.""" + res = client.get("/api/v1/auth/setup-status") + assert res.status_code == 200 + + +def test_protected_auth_path_no_cookie(client): + """/auth/me requires cookie even though it's under /api/v1/auth/.""" + res = client.get("/api/v1/auth/me") + assert res.status_code == 401 + + +def test_protected_path_no_cookie_returns_401(client): + res = client.get("/api/models") + assert res.status_code == 401 + body = res.json() + assert body["detail"]["code"] == "not_authenticated" + + +def test_protected_path_with_junk_cookie_rejected(client): + """Junk cookie → 401. 
Middleware strictly validates the JWT now + (AUTH_TEST_PLAN test 7.5.8); it no longer silently passes bad + tokens through to the route handler.""" + res = client.get("/api/models", cookies={"access_token": "some-token"}) + assert res.status_code == 401 + + +def test_protected_post_no_cookie_returns_401(client): + res = client.post("/api/threads/abc/runs/stream") + assert res.status_code == 401 + + +def test_protected_post_with_internal_auth_header_passes(): + from app.gateway.internal_auth import create_internal_auth_headers + + app = _make_app() + client = TestClient(app) + + res = client.post( + "/api/threads/abc/runs/stream", + headers=create_internal_auth_headers(), + ) + + assert res.status_code == 200 + + +# ── Method matrix: PUT/DELETE/PATCH also protected ──────────────────────── + + +def test_protected_put_no_cookie(client): + res = client.put("/api/mcp/config") + assert res.status_code == 401 + + +def test_protected_delete_no_cookie(client): + res = client.delete("/api/threads/abc") + assert res.status_code == 401 + + +def test_protected_patch_no_cookie(client): + res = client.patch("/api/threads/abc") + assert res.status_code == 401 + + +def test_put_with_junk_cookie_rejected(client): + """Junk cookie on PUT → 401 (strict JWT validation in middleware).""" + client.cookies.set("access_token", "tok") + res = client.put("/api/mcp/config") + assert res.status_code == 401 + + +def test_delete_with_junk_cookie_rejected(client): + """Junk cookie on DELETE → 401 (strict JWT validation in middleware).""" + client.cookies.set("access_token", "tok") + res = client.delete("/api/threads/abc") + assert res.status_code == 401 + + +# ── Fail-closed: unknown future endpoints ───────────────────────────────── + + +def test_unknown_endpoint_no_cookie_returns_401(client): + """Any new /api/* endpoint is blocked by default without cookie.""" + res = client.get("/api/future-endpoint") + assert res.status_code == 401 + + +def test_unknown_endpoint_with_junk_cookie_rejected(client): + """New endpoints are also protected by strict JWT validation.""" + client.cookies.set("access_token", "tok") + res = client.get("/api/future-endpoint") + assert res.status_code == 401 diff --git a/backend/tests/test_auth_type_system.py b/backend/tests/test_auth_type_system.py new file mode 100644 index 000000000..226d3812c --- /dev/null +++ b/backend/tests/test_auth_type_system.py @@ -0,0 +1,701 @@ +"""Tests for auth type system hardening. + +Covers structured error responses, typed decode_token callers, +CSRF middleware path matching, config-driven cookie security, +and unhappy paths / edge cases for all auth boundaries. +""" + +import os +import secrets +from datetime import UTC, datetime, timedelta +from unittest.mock import patch + +import jwt as pyjwt +import pytest +from fastapi import FastAPI +from fastapi.testclient import TestClient +from pydantic import ValidationError + +from app.gateway.auth.config import AuthConfig, set_auth_config +from app.gateway.auth.errors import AuthErrorCode, AuthErrorResponse, TokenError +from app.gateway.auth.jwt import decode_token +from app.gateway.csrf_middleware import ( + CSRF_COOKIE_NAME, + CSRF_HEADER_NAME, + CSRFMiddleware, + is_auth_endpoint, + should_check_csrf, +) + +# ── Setup ──────────────────────────────────────────────────────────── + +_TEST_SECRET = "test-secret-for-auth-type-system-tests-min32" + + +@pytest.fixture(autouse=True) +def _persistence_engine(tmp_path): + """Initialise a per-test SQLite engine + reset cached provider singletons. 
+ + The auth tests call real HTTP handlers that go through + ``SQLiteUserRepository`` → ``get_session_factory``. Each test gets + a fresh DB plus a clean ``deps._cached_*`` so the cached provider + does not hold a dangling reference to the previous test's engine. + """ + import asyncio + + from app.gateway import deps + from deerflow.persistence.engine import close_engine, init_engine + + url = f"sqlite+aiosqlite:///{tmp_path}/auth_types.db" + asyncio.run(init_engine("sqlite", url=url, sqlite_dir=str(tmp_path))) + deps._cached_local_provider = None + deps._cached_repo = None + try: + yield + finally: + deps._cached_local_provider = None + deps._cached_repo = None + asyncio.run(close_engine()) + + +def _setup_config(): + set_auth_config(AuthConfig(jwt_secret=_TEST_SECRET)) + + +# ── CSRF Middleware Path Matching ──────────────────────────────────── + + +class _FakeRequest: + """Minimal request mock for CSRF path matching tests.""" + + def __init__(self, path: str, method: str = "POST"): + self.method = method + + class _URL: + def __init__(self, p): + self.path = p + + self.url = _URL(path) + self.cookies = {} + self.headers = {} + + +def test_csrf_exempts_login_local(): + """login/local (actual route) should be exempt from CSRF.""" + req = _FakeRequest("/api/v1/auth/login/local") + assert is_auth_endpoint(req) is True + + +def test_csrf_exempts_login_local_trailing_slash(): + """Trailing slash should also be exempt.""" + req = _FakeRequest("/api/v1/auth/login/local/") + assert is_auth_endpoint(req) is True + + +def test_csrf_exempts_logout(): + req = _FakeRequest("/api/v1/auth/logout") + assert is_auth_endpoint(req) is True + + +def test_csrf_exempts_register(): + req = _FakeRequest("/api/v1/auth/register") + assert is_auth_endpoint(req) is True + + +def test_csrf_does_not_exempt_old_login_path(): + """Old /api/v1/auth/login (without /local) should NOT be exempt.""" + req = _FakeRequest("/api/v1/auth/login") + assert is_auth_endpoint(req) is False + + +def test_csrf_does_not_exempt_me(): + req = _FakeRequest("/api/v1/auth/me") + assert is_auth_endpoint(req) is False + + +def test_csrf_skips_get_requests(): + req = _FakeRequest("/api/v1/auth/me", method="GET") + assert should_check_csrf(req) is False + + +def test_csrf_checks_post_to_protected(): + req = _FakeRequest("/api/v1/some/endpoint", method="POST") + assert should_check_csrf(req) is True + + +# ── Structured Error Response Format ──────────────────────────────── + + +def test_auth_error_response_has_code_and_message(): + """All auth errors should have structured {code, message} format.""" + err = AuthErrorResponse( + code=AuthErrorCode.INVALID_CREDENTIALS, + message="Wrong password", + ) + d = err.model_dump() + assert "code" in d + assert "message" in d + assert d["code"] == "invalid_credentials" + + +def test_auth_error_response_all_codes_serializable(): + """Every AuthErrorCode should be serializable in AuthErrorResponse.""" + for code in AuthErrorCode: + err = AuthErrorResponse(code=code, message=f"Test {code.value}") + d = err.model_dump() + assert d["code"] == code.value + + +# ── decode_token Caller Pattern ────────────────────────────────────── + + +def test_decode_token_expired_maps_to_token_expired_code(): + """TokenError.EXPIRED should map to AuthErrorCode.TOKEN_EXPIRED.""" + _setup_config() + from datetime import UTC, datetime, timedelta + + import jwt as pyjwt + + expired = {"sub": "u1", "exp": datetime.now(UTC) - timedelta(hours=1), "iat": datetime.now(UTC)} + token = pyjwt.encode(expired, _TEST_SECRET, 
algorithm="HS256") + result = decode_token(token) + assert result == TokenError.EXPIRED + + # Verify the mapping pattern used in route handlers + code = AuthErrorCode.TOKEN_EXPIRED if result == TokenError.EXPIRED else AuthErrorCode.TOKEN_INVALID + assert code == AuthErrorCode.TOKEN_EXPIRED + + +def test_decode_token_invalid_sig_maps_to_token_invalid_code(): + """TokenError.INVALID_SIGNATURE should map to AuthErrorCode.TOKEN_INVALID.""" + _setup_config() + from datetime import UTC, datetime, timedelta + + import jwt as pyjwt + + payload = {"sub": "u1", "exp": datetime.now(UTC) + timedelta(hours=1), "iat": datetime.now(UTC)} + token = pyjwt.encode(payload, "wrong-key", algorithm="HS256") + result = decode_token(token) + assert result == TokenError.INVALID_SIGNATURE + + code = AuthErrorCode.TOKEN_EXPIRED if result == TokenError.EXPIRED else AuthErrorCode.TOKEN_INVALID + assert code == AuthErrorCode.TOKEN_INVALID + + +def test_decode_token_malformed_maps_to_token_invalid_code(): + """TokenError.MALFORMED should map to AuthErrorCode.TOKEN_INVALID.""" + _setup_config() + result = decode_token("garbage") + assert result == TokenError.MALFORMED + + code = AuthErrorCode.TOKEN_EXPIRED if result == TokenError.EXPIRED else AuthErrorCode.TOKEN_INVALID + assert code == AuthErrorCode.TOKEN_INVALID + + +# ── Login Response Format ──────────────────────────────────────────── + + +def test_login_response_model_has_no_access_token(): + """LoginResponse should NOT contain access_token field (RFC-001).""" + from app.gateway.routers.auth import LoginResponse + + resp = LoginResponse(expires_in=604800) + d = resp.model_dump() + assert "access_token" not in d + assert "expires_in" in d + assert d["expires_in"] == 604800 + + +def test_login_response_model_fields(): + """LoginResponse has expires_in and needs_setup.""" + from app.gateway.routers.auth import LoginResponse + + fields = set(LoginResponse.model_fields.keys()) + assert fields == {"expires_in", "needs_setup"} + + +# ── AuthConfig in Route ────────────────────────────────────────────── + + +def test_auth_config_token_expiry_used_in_login_response(): + """LoginResponse.expires_in should come from config.token_expiry_days.""" + from app.gateway.routers.auth import LoginResponse + + expected_seconds = 14 * 24 * 3600 + resp = LoginResponse(expires_in=expected_seconds) + assert resp.expires_in == expected_seconds + + +# ── UserResponse Type Preservation ─────────────────────────────────── + + +def test_user_response_system_role_literal(): + """UserResponse.system_role should only accept 'admin' or 'user'.""" + from app.gateway.auth.models import UserResponse + + # Valid roles + resp = UserResponse(id="1", email="a@b.com", system_role="admin") + assert resp.system_role == "admin" + + resp = UserResponse(id="1", email="a@b.com", system_role="user") + assert resp.system_role == "user" + + +def test_user_response_rejects_invalid_role(): + """UserResponse should reject invalid system_role values.""" + from app.gateway.auth.models import UserResponse + + with pytest.raises(ValidationError): + UserResponse(id="1", email="a@b.com", system_role="superadmin") + + +# ══════════════════════════════════════════════════════════════════════ +# UNHAPPY PATHS / EDGE CASES +# ══════════════════════════════════════════════════════════════════════ + + +# ── get_current_user structured 401 responses ──────────────────────── + + +def test_get_current_user_no_cookie_returns_not_authenticated(): + """No cookie → 401 with code=not_authenticated.""" + import asyncio + + from fastapi 
import HTTPException + + from app.gateway.deps import get_current_user_from_request + + mock_request = type("MockRequest", (), {"cookies": {}})() + with pytest.raises(HTTPException) as exc_info: + asyncio.run(get_current_user_from_request(mock_request)) + assert exc_info.value.status_code == 401 + detail = exc_info.value.detail + assert detail["code"] == "not_authenticated" + + +def test_get_current_user_expired_token_returns_token_expired(): + """Expired token → 401 with code=token_expired.""" + import asyncio + + from fastapi import HTTPException + + from app.gateway.deps import get_current_user_from_request + + _setup_config() + expired = {"sub": "u1", "exp": datetime.now(UTC) - timedelta(hours=1), "iat": datetime.now(UTC)} + token = pyjwt.encode(expired, _TEST_SECRET, algorithm="HS256") + + mock_request = type("MockRequest", (), {"cookies": {"access_token": token}})() + with pytest.raises(HTTPException) as exc_info: + asyncio.run(get_current_user_from_request(mock_request)) + assert exc_info.value.status_code == 401 + detail = exc_info.value.detail + assert detail["code"] == "token_expired" + + +def test_get_current_user_invalid_token_returns_token_invalid(): + """Bad signature → 401 with code=token_invalid.""" + import asyncio + + from fastapi import HTTPException + + from app.gateway.deps import get_current_user_from_request + + _setup_config() + payload = {"sub": "u1", "exp": datetime.now(UTC) + timedelta(hours=1), "iat": datetime.now(UTC)} + token = pyjwt.encode(payload, "wrong-secret", algorithm="HS256") + + mock_request = type("MockRequest", (), {"cookies": {"access_token": token}})() + with pytest.raises(HTTPException) as exc_info: + asyncio.run(get_current_user_from_request(mock_request)) + assert exc_info.value.status_code == 401 + detail = exc_info.value.detail + assert detail["code"] == "token_invalid" + + +def test_get_current_user_malformed_token_returns_token_invalid(): + """Garbage token → 401 with code=token_invalid.""" + import asyncio + + from fastapi import HTTPException + + from app.gateway.deps import get_current_user_from_request + + _setup_config() + mock_request = type("MockRequest", (), {"cookies": {"access_token": "not-a-jwt"}})() + with pytest.raises(HTTPException) as exc_info: + asyncio.run(get_current_user_from_request(mock_request)) + assert exc_info.value.status_code == 401 + detail = exc_info.value.detail + assert detail["code"] == "token_invalid" + + +# ── decode_token edge cases ────────────────────────────────────────── + + +def test_decode_token_empty_string_returns_malformed(): + _setup_config() + result = decode_token("") + assert result == TokenError.MALFORMED + + +def test_decode_token_whitespace_returns_malformed(): + _setup_config() + result = decode_token(" ") + assert result == TokenError.MALFORMED + + +# ── AuthConfig validation edge cases ───────────────────────────────── + + +def test_auth_config_missing_jwt_secret_raises(): + """AuthConfig requires jwt_secret — no default allowed.""" + with pytest.raises(ValidationError): + AuthConfig() + + +def test_auth_config_token_expiry_zero_raises(): + """token_expiry_days must be >= 1.""" + with pytest.raises(ValidationError): + AuthConfig(jwt_secret="secret", token_expiry_days=0) + + +def test_auth_config_token_expiry_31_raises(): + """token_expiry_days must be <= 30.""" + with pytest.raises(ValidationError): + AuthConfig(jwt_secret="secret", token_expiry_days=31) + + +def test_auth_config_token_expiry_boundary_1_ok(): + config = AuthConfig(jwt_secret="secret", token_expiry_days=1) + assert 
config.token_expiry_days == 1 + + +def test_auth_config_token_expiry_boundary_30_ok(): + config = AuthConfig(jwt_secret="secret", token_expiry_days=30) + assert config.token_expiry_days == 30 + + +def test_get_auth_config_missing_env_var_generates_ephemeral(caplog): + """get_auth_config() auto-generates ephemeral secret when AUTH_JWT_SECRET is unset.""" + import logging + + import app.gateway.auth.config as cfg + + old = cfg._auth_config + cfg._auth_config = None + try: + with patch.dict(os.environ, {}, clear=True): + os.environ.pop("AUTH_JWT_SECRET", None) + with caplog.at_level(logging.WARNING): + config = cfg.get_auth_config() + assert config.jwt_secret + assert any("AUTH_JWT_SECRET" in msg for msg in caplog.messages) + finally: + cfg._auth_config = old + + +# ── CSRF middleware integration (unhappy paths) ────────────────────── + + +def _make_csrf_app(): + """Create a minimal FastAPI app with CSRFMiddleware for testing.""" + from fastapi import HTTPException as _HTTPException + from fastapi.responses import JSONResponse as _JSONResponse + + app = FastAPI() + + @app.exception_handler(_HTTPException) + async def _http_exc_handler(request, exc): + return _JSONResponse(status_code=exc.status_code, content={"detail": exc.detail}) + + app.add_middleware(CSRFMiddleware) + + @app.post("/api/v1/test/protected") + async def protected(): + return {"ok": True} + + @app.post("/api/v1/auth/login/local") + async def login(): + return {"ok": True} + + @app.get("/api/v1/test/read") + async def read_endpoint(): + return {"ok": True} + + return app + + +def test_csrf_middleware_blocks_post_without_token(): + """POST to protected endpoint without CSRF token → 403 with structured detail.""" + client = TestClient(_make_csrf_app()) + resp = client.post("/api/v1/test/protected") + assert resp.status_code == 403 + assert "CSRF" in resp.json()["detail"] + assert "missing" in resp.json()["detail"].lower() + + +def test_csrf_middleware_blocks_post_with_mismatched_token(): + """POST with mismatched CSRF cookie/header → 403 with mismatch detail.""" + client = TestClient(_make_csrf_app()) + client.cookies.set(CSRF_COOKIE_NAME, "token-a") + resp = client.post( + "/api/v1/test/protected", + headers={CSRF_HEADER_NAME: "token-b"}, + ) + assert resp.status_code == 403 + assert "mismatch" in resp.json()["detail"].lower() + + +def test_csrf_middleware_allows_post_with_matching_token(): + """POST with matching CSRF cookie/header → 200.""" + client = TestClient(_make_csrf_app()) + token = secrets.token_urlsafe(64) + client.cookies.set(CSRF_COOKIE_NAME, token) + resp = client.post( + "/api/v1/test/protected", + headers={CSRF_HEADER_NAME: token}, + ) + assert resp.status_code == 200 + + +def test_csrf_middleware_allows_get_without_token(): + """GET requests bypass CSRF check.""" + client = TestClient(_make_csrf_app()) + resp = client.get("/api/v1/test/read") + assert resp.status_code == 200 + + +def test_csrf_middleware_exempts_login_local(): + """POST to login/local is exempt from CSRF (no token yet).""" + client = TestClient(_make_csrf_app()) + resp = client.post("/api/v1/auth/login/local") + assert resp.status_code == 200 + + +def test_csrf_middleware_sets_cookie_on_auth_endpoint(): + """Auth endpoints should receive a CSRF cookie in response.""" + client = TestClient(_make_csrf_app()) + resp = client.post("/api/v1/auth/login/local") + assert CSRF_COOKIE_NAME in resp.cookies + + +# ── UserResponse edge cases ────────────────────────────────────────── + + +def test_user_response_missing_required_fields(): + """UserResponse 
with missing fields → ValidationError.""" + from app.gateway.auth.models import UserResponse + + with pytest.raises(ValidationError): + UserResponse(id="1") # missing email, system_role + + with pytest.raises(ValidationError): + UserResponse(id="1", email="a@b.com") # missing system_role + + +def test_user_response_empty_string_role_rejected(): + """Empty string is not a valid role.""" + from app.gateway.auth.models import UserResponse + + with pytest.raises(ValidationError): + UserResponse(id="1", email="a@b.com", system_role="") + + +# ══════════════════════════════════════════════════════════════════════ +# HTTP-LEVEL API CONTRACT TESTS +# ══════════════════════════════════════════════════════════════════════ + + +def _make_auth_app(): + """Create FastAPI app with auth routes for contract testing.""" + from app.gateway.app import create_app + + return create_app() + + +def _get_auth_client(): + """Get TestClient for auth API contract tests.""" + return TestClient(_make_auth_app()) + + +def test_api_auth_me_no_cookie_returns_structured_401(): + """/api/v1/auth/me without cookie → 401 with {code: 'not_authenticated'}.""" + _setup_config() + client = _get_auth_client() + resp = client.get("/api/v1/auth/me") + assert resp.status_code == 401 + body = resp.json() + assert body["detail"]["code"] == "not_authenticated" + assert "message" in body["detail"] + + +def test_api_auth_me_expired_token_returns_structured_401(): + """/api/v1/auth/me with expired token → 401 with {code: 'token_expired'}.""" + _setup_config() + expired = {"sub": "u1", "exp": datetime.now(UTC) - timedelta(hours=1), "iat": datetime.now(UTC)} + token = pyjwt.encode(expired, _TEST_SECRET, algorithm="HS256") + + client = _get_auth_client() + client.cookies.set("access_token", token) + resp = client.get("/api/v1/auth/me") + assert resp.status_code == 401 + body = resp.json() + assert body["detail"]["code"] == "token_expired" + + +def test_api_auth_me_invalid_sig_returns_structured_401(): + """/api/v1/auth/me with bad signature → 401 with {code: 'token_invalid'}.""" + _setup_config() + payload = {"sub": "u1", "exp": datetime.now(UTC) + timedelta(hours=1), "iat": datetime.now(UTC)} + token = pyjwt.encode(payload, "wrong-key", algorithm="HS256") + + client = _get_auth_client() + client.cookies.set("access_token", token) + resp = client.get("/api/v1/auth/me") + assert resp.status_code == 401 + body = resp.json() + assert body["detail"]["code"] == "token_invalid" + + +def test_api_login_bad_credentials_returns_structured_401(): + """Login with wrong password → 401 with {code: 'invalid_credentials'}.""" + _setup_config() + client = _get_auth_client() + resp = client.post( + "/api/v1/auth/login/local", + data={"username": "nonexistent@test.com", "password": "wrongpassword"}, + ) + assert resp.status_code == 401 + body = resp.json() + assert body["detail"]["code"] == "invalid_credentials" + + +def test_api_login_success_no_token_in_body(): + """Successful login → response body has expires_in but NOT access_token.""" + _setup_config() + client = _get_auth_client() + # Register first + client.post( + "/api/v1/auth/register", + json={"email": "contract-test@test.com", "password": "securepassword123"}, + ) + # Login + resp = client.post( + "/api/v1/auth/login/local", + data={"username": "contract-test@test.com", "password": "securepassword123"}, + ) + assert resp.status_code == 200 + body = resp.json() + assert "expires_in" in body + assert "access_token" not in body + # Token should be in cookie, not body + assert "access_token" in 
resp.cookies
+
+
+def test_api_register_duplicate_returns_structured_400():
+    """Register with duplicate email → 400 with {code: 'email_already_exists'}."""
+    _setup_config()
+    client = _get_auth_client()
+    email = "dup-contract-test@test.com"
+    # First register
+    client.post("/api/v1/auth/register", json={"email": email, "password": "Tr0ub4dor3a"})
+    # Duplicate
+    resp = client.post("/api/v1/auth/register", json={"email": email, "password": "AnotherStr0ngPwd!"})
+    assert resp.status_code == 400
+    body = resp.json()
+    assert body["detail"]["code"] == "email_already_exists"
+
+
+# ── Cookie security: HTTP vs HTTPS ──────────────────────────────────
+
+
+def _unique_email(prefix: str) -> str:
+    return f"{prefix}-{secrets.token_hex(4)}@test.com"
+
+
+def _get_set_cookie_headers(resp) -> list[str]:
+    """Extract all set-cookie header values from a TestClient response."""
+    return [v for k, v in resp.headers.multi_items() if k.lower() == "set-cookie"]
+
+
+def test_register_http_cookie_httponly_true_secure_false():
+    """HTTP register → access_token cookie is httponly=True, secure=False."""
+    _setup_config()
+    client = _get_auth_client()
+    resp = client.post(
+        "/api/v1/auth/register",
+        json={"email": _unique_email("http-cookie"), "password": "Tr0ub4dor3a"},
+    )
+    assert resp.status_code == 201
+    cookie_header = resp.headers.get("set-cookie", "")
+    assert "access_token=" in cookie_header
+    assert "httponly" in cookie_header.lower()
+    assert "secure" not in cookie_header.lower().replace("samesite", "")
+
+
+def test_register_https_cookie_httponly_true_secure_true():
+    """HTTPS register (x-forwarded-proto) → access_token cookie is httponly=True, secure=True, has max_age."""
+    _setup_config()
+    client = _get_auth_client()
+    resp = client.post(
+        "/api/v1/auth/register",
+        json={"email": _unique_email("https-cookie"), "password": "Tr0ub4dor3a"},
+        headers={"x-forwarded-proto": "https"},
+    )
+    assert resp.status_code == 201
+    cookie_header = resp.headers.get("set-cookie", "")
+    assert "access_token=" in cookie_header
+    assert "httponly" in cookie_header.lower()
+    assert "secure" in cookie_header.lower()
+    assert "max-age" in cookie_header.lower()
+
+
+def test_login_https_sets_secure_cookie():
+    """HTTPS login → access_token cookie has secure flag."""
+    _setup_config()
+    client = _get_auth_client()
+    email = _unique_email("https-login")
+    client.post("/api/v1/auth/register", json={"email": email, "password": "Tr0ub4dor3a"})
+    resp = client.post(
+        "/api/v1/auth/login/local",
+        data={"username": email, "password": "Tr0ub4dor3a"},
+        headers={"x-forwarded-proto": "https"},
+    )
+    assert resp.status_code == 200
+    cookie_header = resp.headers.get("set-cookie", "")
+    assert "access_token=" in cookie_header
+    assert "httponly" in cookie_header.lower()
+    assert "secure" in cookie_header.lower()
+
+
+def test_csrf_cookie_secure_on_https():
+    """HTTPS register → csrf_token cookie has secure flag but NOT httponly."""
+    _setup_config()
+    client = _get_auth_client()
+    resp = client.post(
+        "/api/v1/auth/register",
+        json={"email": _unique_email("csrf-https"), "password": "Tr0ub4dor3a"},
+        headers={"x-forwarded-proto": "https"},
+    )
+    assert resp.status_code == 201
+    csrf_cookies = [h for h in _get_set_cookie_headers(resp) if "csrf_token=" in h]
+    assert csrf_cookies, "csrf_token cookie not set on HTTPS register"
+    csrf_header = csrf_cookies[0]
+    assert "secure" in csrf_header.lower()
+    assert "httponly" not in csrf_header.lower()
+
+
+def test_csrf_cookie_not_secure_on_http():
+    """HTTP register → 
csrf_token cookie does NOT have secure flag.""" + _setup_config() + client = _get_auth_client() + resp = client.post( + "/api/v1/auth/register", + json={"email": _unique_email("csrf-http"), "password": "Tr0ub4dor3a"}, + ) + assert resp.status_code == 201 + csrf_cookies = [h for h in _get_set_cookie_headers(resp) if "csrf_token=" in h] + assert csrf_cookies, "csrf_token cookie not set on HTTP register" + csrf_header = csrf_cookies[0] + assert "secure" not in csrf_header.lower().replace("samesite", "") diff --git a/backend/tests/test_channel_file_attachments.py b/backend/tests/test_channel_file_attachments.py index 2843a9cd0..7273b1c82 100644 --- a/backend/tests/test_channel_file_attachments.py +++ b/backend/tests/test_channel_file_attachments.py @@ -231,7 +231,7 @@ class TestResolveAttachments: mock_paths = MagicMock() mock_paths.sandbox_outputs_dir.return_value = outputs_dir - def resolve_side_effect(tid, vpath): + def resolve_side_effect(tid, vpath, *, user_id=None): if "data.csv" in vpath: return good_file return tmp_path / "missing.txt" diff --git a/backend/tests/test_channels.py b/backend/tests/test_channels.py index b3c1870e3..9bd484567 100644 --- a/backend/tests/test_channels.py +++ b/backend/tests/test_channels.py @@ -414,6 +414,27 @@ def _make_async_iterator(items): class TestChannelManager: + def test_get_client_includes_csrf_header_and_cookie(self): + from app.channels.manager import ChannelManager + + bus = MessageBus() + store = ChannelStore(path=Path(tempfile.mkdtemp()) / "store.json") + manager = ChannelManager(bus=bus, store=store, langgraph_url="http://localhost:8001") + + with patch("langgraph_sdk.get_client") as get_client: + get_client.return_value = object() + + manager._get_client() + + get_client.assert_called_once() + kwargs = get_client.call_args.kwargs + assert kwargs["url"] == "http://localhost:8001" + headers = kwargs["headers"] + csrf_token = headers["X-CSRF-Token"] + assert csrf_token + assert headers["Cookie"] == f"csrf_token={csrf_token}" + assert headers["X-DeerFlow-Internal-Token"] + def test_handle_chat_calls_channel_receive_file_for_inbound_files(self, monkeypatch): from app.channels.manager import ChannelManager @@ -441,6 +462,7 @@ class TestChannelManager: ) mock_channel = MagicMock() mock_channel.receive_file = AsyncMock(return_value=modified_msg) + mock_channel.supports_streaming = False mock_service = MagicMock() mock_service.get_channel.return_value = mock_channel monkeypatch.setattr("app.channels.service.get_channel_service", lambda: mock_service) @@ -495,7 +517,7 @@ class TestChannelManager: await _wait_for(lambda: len(outbound_received) >= 1) await manager.stop() - # Thread should be created on the LangGraph Server + # Thread should be created through Gateway mock_client.threads.create.assert_called_once() # Thread ID should be stored @@ -516,6 +538,89 @@ class TestChannelManager: _run(go()) + def test_handle_chat_outbound_preserves_inbound_metadata(self): + """DingTalk (and similar) need inbound metadata on outbound sends (e.g. 
sender_staff_id).""" + from app.channels.manager import ChannelManager + + async def go(): + bus = MessageBus() + store = ChannelStore(path=Path(tempfile.mkdtemp()) / "store.json") + manager = ChannelManager(bus=bus, store=store) + outbound_received: list[OutboundMessage] = [] + + async def capture_outbound(msg: OutboundMessage) -> None: + outbound_received.append(msg) + + bus.subscribe_outbound(capture_outbound) + mock_client = _make_mock_langgraph_client() + manager._client = mock_client + await manager.start() + + meta = { + "sender_staff_id": "staff_001", + "conversation_type": "1", + "conversation_id": "conv_001", + } + inbound = InboundMessage( + channel_name="test", + chat_id="chat1", + user_id="user1", + text="hi", + metadata=meta, + ) + await bus.publish_inbound(inbound) + await _wait_for(lambda: len(outbound_received) >= 1) + await manager.stop() + + assert len(outbound_received) == 1 + assert outbound_received[0].metadata == meta + + _run(go()) + + def test_handle_chat_outbound_drops_large_metadata_keys(self): + """Large metadata keys like raw_message should be stripped from outbound messages.""" + from app.channels.manager import ChannelManager + + async def go(): + bus = MessageBus() + store = ChannelStore(path=Path(tempfile.mkdtemp()) / "store.json") + manager = ChannelManager(bus=bus, store=store) + outbound_received: list[OutboundMessage] = [] + + async def capture_outbound(msg: OutboundMessage) -> None: + outbound_received.append(msg) + + bus.subscribe_outbound(capture_outbound) + mock_client = _make_mock_langgraph_client() + manager._client = mock_client + await manager.start() + + meta = { + "sender_staff_id": "staff_001", + "conversation_type": "1", + "raw_message": {"huge": "payload" * 1000}, + "ref_msg": {"also": "large"}, + } + inbound = InboundMessage( + channel_name="test", + chat_id="chat1", + user_id="user1", + text="hi", + metadata=meta, + ) + await bus.publish_inbound(inbound) + await _wait_for(lambda: len(outbound_received) >= 1) + await manager.stop() + + assert len(outbound_received) == 1 + out_meta = outbound_received[0].metadata + assert "sender_staff_id" in out_meta + assert "conversation_type" in out_meta + assert "raw_message" not in out_meta + assert "ref_msg" not in out_meta + + _run(go()) + def test_handle_chat_uses_channel_session_overrides(self): from app.channels.manager import ChannelManager @@ -2114,30 +2219,105 @@ class TestChannelService: def test_service_urls_fall_back_to_env(self, monkeypatch): from app.channels.service import ChannelService - monkeypatch.setenv("DEER_FLOW_CHANNELS_LANGGRAPH_URL", "http://langgraph:2024") + monkeypatch.setenv("DEER_FLOW_CHANNELS_LANGGRAPH_URL", "http://gateway:8001/api") monkeypatch.setenv("DEER_FLOW_CHANNELS_GATEWAY_URL", "http://gateway:8001") service = ChannelService(channels_config={}) - assert service.manager._langgraph_url == "http://langgraph:2024" + assert service.manager._langgraph_url == "http://gateway:8001/api" assert service.manager._gateway_url == "http://gateway:8001" def test_config_service_urls_override_env(self, monkeypatch): from app.channels.service import ChannelService - monkeypatch.setenv("DEER_FLOW_CHANNELS_LANGGRAPH_URL", "http://langgraph:2024") + monkeypatch.setenv("DEER_FLOW_CHANNELS_LANGGRAPH_URL", "http://gateway:8001/api") monkeypatch.setenv("DEER_FLOW_CHANNELS_GATEWAY_URL", "http://gateway:8001") service = ChannelService( channels_config={ - "langgraph_url": "http://custom-langgraph:2024", + "langgraph_url": "http://custom-gateway:8001/api", "gateway_url": 
"http://custom-gateway:8001", } ) - assert service.manager._langgraph_url == "http://custom-langgraph:2024" + assert service.manager._langgraph_url == "http://custom-gateway:8001/api" assert service.manager._gateway_url == "http://custom-gateway:8001" + def test_from_app_config_uses_explicit_config(self): + from app.channels.service import ChannelService + + app_config = SimpleNamespace( + model_extra={ + "channels": { + "telegram": {"enabled": False}, + } + } + ) + + with patch("deerflow.config.app_config.get_app_config", side_effect=AssertionError("should not read global config")): + service = ChannelService.from_app_config(app_config) + + assert service._config == {"telegram": {"enabled": False}} + + def test_disabled_channel_with_string_creds_emits_warning(self, caplog): + """Warning is emitted when a channel has string credentials but enabled=false.""" + import logging + + from app.channels.service import ChannelService + + async def go(): + service = ChannelService( + channels_config={ + "wecom": {"enabled": False, "bot_id": "corp123", "bot_secret": "secret"}, + } + ) + with caplog.at_level(logging.WARNING, logger="app.channels.service"): + await service.start() + await service.stop() + + _run(go()) + assert any("wecom" in r.message and r.levelno == logging.WARNING for r in caplog.records) + + def test_disabled_channel_with_int_creds_emits_warning(self, caplog): + """Warning is emitted even when YAML-parsed integer credentials are present.""" + import logging + + from app.channels.service import ChannelService + + async def go(): + # Simulate YAML parsing a numeric token/ID as an int + service = ChannelService( + channels_config={ + "telegram": {"enabled": False, "bot_token": 123456789}, + } + ) + with caplog.at_level(logging.WARNING, logger="app.channels.service"): + await service.start() + await service.stop() + + _run(go()) + assert any("telegram" in r.message and r.levelno == logging.WARNING for r in caplog.records) + + def test_disabled_channel_without_creds_emits_info(self, caplog): + """Only an info log (no warning) is emitted when a channel is disabled with no credentials.""" + import logging + + from app.channels.service import ChannelService + + async def go(): + service = ChannelService( + channels_config={ + "telegram": {"enabled": False}, + } + ) + with caplog.at_level(logging.DEBUG, logger="app.channels.service"): + await service.start() + await service.stop() + + _run(go()) + warning_records = [r for r in caplog.records if "telegram" in r.message and r.levelno == logging.WARNING] + assert not warning_records + # --------------------------------------------------------------------------- # Slack send retry tests @@ -2173,6 +2353,11 @@ class TestSlackSendRetry: class TestSlackAllowedUsers: + @staticmethod + def _submit_coro(coro, loop): + coro.close() + return MagicMock() + def test_numeric_allowed_users_match_string_event_user_id(self): from app.channels.slack import SlackChannel @@ -2194,13 +2379,9 @@ class TestSlackAllowedUsers: "ts": "1710000000.000100", } - def submit_coro(coro, loop): - coro.close() - return MagicMock() - with patch( "app.channels.slack.asyncio.run_coroutine_threadsafe", - side_effect=submit_coro, + side_effect=self._submit_coro, ) as submit: channel._handle_message_event(event) @@ -2212,6 +2393,74 @@ class TestSlackAllowedUsers: assert inbound.chat_id == "C123" assert inbound.text == "hello from slack" + def test_string_allowed_users_match_event_user_id(self): + from app.channels.slack import SlackChannel + + bus = MessageBus() + 
bus.publish_inbound = AsyncMock() + channel = SlackChannel( + bus=bus, + config={"allowed_users": "U123456"}, + ) + channel._loop = MagicMock() + channel._loop.is_running.return_value = True + channel._add_reaction = MagicMock() + channel._send_running_reply = MagicMock() + + event = { + "user": "U123456", + "text": "hello from slack", + "channel": "C123", + "ts": "1710000000.000100", + } + + with patch( + "app.channels.slack.asyncio.run_coroutine_threadsafe", + side_effect=self._submit_coro, + ) as submit: + channel._handle_message_event(event) + + channel._add_reaction.assert_called_once_with("C123", "1710000000.000100", "eyes") + channel._send_running_reply.assert_called_once_with("C123", "1710000000.000100") + submit.assert_called_once() + inbound = bus.publish_inbound.call_args.args[0] + assert inbound.user_id == "U123456" + assert inbound.chat_id == "C123" + assert inbound.text == "hello from slack" + + def test_scalar_allowed_users_warns_and_matches_stringified_event_user_id(self, caplog): + from app.channels.slack import SlackChannel + + bus = MessageBus() + bus.publish_inbound = AsyncMock() + with caplog.at_level("WARNING"): + channel = SlackChannel( + bus=bus, + config={"allowed_users": 123456}, + ) + channel._loop = MagicMock() + channel._loop.is_running.return_value = True + channel._add_reaction = MagicMock() + channel._send_running_reply = MagicMock() + + event = { + "user": "123456", + "text": "hello from slack", + "channel": "C123", + "ts": "1710000000.000100", + } + + with patch( + "app.channels.slack.asyncio.run_coroutine_threadsafe", + side_effect=self._submit_coro, + ) as submit: + channel._handle_message_event(event) + + assert "Slack allowed_users should be a list" in caplog.text + submit.assert_called_once() + inbound = bus.publish_inbound.call_args.args[0] + assert inbound.user_id == "123456" + def test_raises_after_all_retries_exhausted(self): from app.channels.slack import SlackChannel diff --git a/backend/tests/test_checkpointer.py b/backend/tests/test_checkpointer.py index 44db0e2d1..5a31cfb78 100644 --- a/backend/tests/test_checkpointer.py +++ b/backend/tests/test_checkpointer.py @@ -6,13 +6,13 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest import deerflow.config.app_config as app_config_module -from deerflow.agents.checkpointer import get_checkpointer, reset_checkpointer from deerflow.config.checkpointer_config import ( CheckpointerConfig, get_checkpointer_config, load_checkpointer_config_from_dict, set_checkpointer_config, ) +from deerflow.runtime.checkpointer import get_checkpointer, reset_checkpointer @pytest.fixture(autouse=True) @@ -78,7 +78,7 @@ class TestGetCheckpointer: """get_checkpointer should return InMemorySaver when not configured.""" from langgraph.checkpoint.memory import InMemorySaver - with patch("deerflow.agents.checkpointer.provider.get_app_config", side_effect=FileNotFoundError): + with patch("deerflow.runtime.checkpointer.provider.get_app_config", side_effect=FileNotFoundError): cp = get_checkpointer() assert cp is not None assert isinstance(cp, InMemorySaver) @@ -174,9 +174,9 @@ class TestGetCheckpointer: with ( patch.dict(sys.modules, {"langgraph.checkpoint.sqlite": mock_module}), - patch("deerflow.agents.checkpointer.provider.ensure_sqlite_parent_dir") as mock_ensure, + patch("deerflow.runtime.checkpointer.provider.ensure_sqlite_parent_dir") as mock_ensure, patch( - "deerflow.agents.checkpointer.provider.resolve_sqlite_conn_str", + "deerflow.runtime.checkpointer.provider.resolve_sqlite_conn_str", 
return_value="/tmp/resolved/relative/test.db", ), ): @@ -210,11 +210,11 @@ class TestGetCheckpointer: with ( patch.dict(sys.modules, {"langgraph.checkpoint.sqlite": mock_module}), patch( - "deerflow.agents.checkpointer.provider.ensure_sqlite_parent_dir", + "deerflow.runtime.checkpointer.provider.ensure_sqlite_parent_dir", side_effect=record_ensure, ), patch( - "deerflow.agents.checkpointer.provider.resolve_sqlite_conn_str", + "deerflow.runtime.checkpointer.provider.resolve_sqlite_conn_str", return_value="/tmp/resolved/relative/test.db", ), ): @@ -251,7 +251,7 @@ class TestAsyncCheckpointer: @pytest.mark.anyio async def test_sqlite_creates_parent_dir_via_to_thread(self): """Async SQLite setup should move mkdir off the event loop.""" - from deerflow.agents.checkpointer.async_provider import make_checkpointer + from deerflow.runtime.checkpointer.async_provider import make_checkpointer mock_config = MagicMock() mock_config.checkpointer = CheckpointerConfig(type="sqlite", connection_string="relative/test.db") @@ -268,11 +268,11 @@ class TestAsyncCheckpointer: mock_module.AsyncSqliteSaver = mock_saver_cls with ( - patch("deerflow.agents.checkpointer.async_provider.get_app_config", return_value=mock_config), + patch("deerflow.runtime.checkpointer.async_provider.get_app_config", return_value=mock_config), patch.dict(sys.modules, {"langgraph.checkpoint.sqlite.aio": mock_module}), - patch("deerflow.agents.checkpointer.async_provider.asyncio.to_thread", new_callable=AsyncMock) as mock_to_thread, + patch("deerflow.runtime.checkpointer.async_provider.asyncio.to_thread", new_callable=AsyncMock) as mock_to_thread, patch( - "deerflow.agents.checkpointer.async_provider.resolve_sqlite_conn_str", + "deerflow.runtime.checkpointer.async_provider.resolve_sqlite_conn_str", return_value="/tmp/resolved/test.db", ), ): diff --git a/backend/tests/test_checkpointer_none_fix.py b/backend/tests/test_checkpointer_none_fix.py index 4e128adbc..3c7a25fa1 100644 --- a/backend/tests/test_checkpointer_none_fix.py +++ b/backend/tests/test_checkpointer_none_fix.py @@ -12,13 +12,14 @@ class TestCheckpointerNoneFix: @pytest.mark.anyio async def test_async_make_checkpointer_returns_in_memory_saver_when_not_configured(self): """make_checkpointer should return InMemorySaver when config.checkpointer is None.""" - from deerflow.agents.checkpointer.async_provider import make_checkpointer + from deerflow.runtime.checkpointer.async_provider import make_checkpointer - # Mock get_app_config to return a config with checkpointer=None + # Mock get_app_config to return a config with checkpointer=None and database=None mock_config = MagicMock() mock_config.checkpointer = None + mock_config.database = None - with patch("deerflow.agents.checkpointer.async_provider.get_app_config", return_value=mock_config): + with patch("deerflow.runtime.checkpointer.async_provider.get_app_config", return_value=mock_config): async with make_checkpointer() as checkpointer: # Should return InMemorySaver, not None assert checkpointer is not None @@ -35,13 +36,13 @@ class TestCheckpointerNoneFix: def test_sync_checkpointer_context_returns_in_memory_saver_when_not_configured(self): """checkpointer_context should return InMemorySaver when config.checkpointer is None.""" - from deerflow.agents.checkpointer.provider import checkpointer_context + from deerflow.runtime.checkpointer.provider import checkpointer_context # Mock get_app_config to return a config with checkpointer=None mock_config = MagicMock() mock_config.checkpointer = None - with 
patch("deerflow.agents.checkpointer.provider.get_app_config", return_value=mock_config): + with patch("deerflow.runtime.checkpointer.provider.get_app_config", return_value=mock_config): with checkpointer_context() as checkpointer: # Should return InMemorySaver, not None assert checkpointer is not None diff --git a/backend/tests/test_claude_provider_prompt_caching.py b/backend/tests/test_claude_provider_prompt_caching.py new file mode 100644 index 000000000..e212b7329 --- /dev/null +++ b/backend/tests/test_claude_provider_prompt_caching.py @@ -0,0 +1,249 @@ +"""Tests for ClaudeChatModel._apply_prompt_caching. + +Validates that the function never places more than 4 cache_control breakpoints +(the hard limit enforced by the Anthropic API and AWS Bedrock) regardless of +how many system blocks, message content blocks, or tool definitions are present. +""" + +from unittest import mock + +import pytest + +from deerflow.models.claude_provider import ClaudeChatModel + + +def _make_model(prompt_cache_size: int = 3) -> ClaudeChatModel: + """Return a minimal ClaudeChatModel instance without network calls.""" + with mock.patch.object(ClaudeChatModel, "model_post_init"): + m = ClaudeChatModel( + model="claude-sonnet-4-6", + anthropic_api_key="sk-ant-fake", # type: ignore[call-arg] + prompt_cache_size=prompt_cache_size, + ) + m._is_oauth = False + m.enable_prompt_caching = True + return m + + +def _count_cache_control(payload: dict) -> int: + """Count the total number of cache_control markers in a payload.""" + count = 0 + + system = payload.get("system", []) + if isinstance(system, list): + for block in system: + if isinstance(block, dict) and "cache_control" in block: + count += 1 + + for msg in payload.get("messages", []): + if not isinstance(msg, dict): + continue + content = msg.get("content", []) + if isinstance(content, list): + for block in content: + if isinstance(block, dict) and "cache_control" in block: + count += 1 + + for tool in payload.get("tools", []): + if isinstance(tool, dict) and "cache_control" in tool: + count += 1 + + return count + + +@pytest.fixture() +def model() -> ClaudeChatModel: + return _make_model() + + +# --------------------------------------------------------------------------- +# Basic correctness +# --------------------------------------------------------------------------- + + +def test_single_system_block_gets_cached(model): + payload: dict = {"system": [{"type": "text", "text": "sys"}]} + model._apply_prompt_caching(payload) + assert payload["system"][0].get("cache_control") == {"type": "ephemeral"} + + +def test_string_system_converted_and_cached(model): + payload: dict = {"system": "you are helpful"} + model._apply_prompt_caching(payload) + assert isinstance(payload["system"], list) + assert payload["system"][0].get("cache_control") == {"type": "ephemeral"} + + +def test_last_tool_gets_cached_when_budget_allows(model): + payload: dict = { + "tools": [{"name": "t1"}, {"name": "t2"}], + } + model._apply_prompt_caching(payload) + # With no system or messages the last tool should be cached. 
+ assert payload["tools"][-1].get("cache_control") == {"type": "ephemeral"} + assert "cache_control" not in payload["tools"][0] + + +def test_recent_messages_get_cached(model): + """The last prompt_cache_size messages' content blocks should be cached.""" + payload: dict = { + "messages": [ + {"role": "user", "content": [{"type": "text", "text": "hello"}]}, + ], + } + model._apply_prompt_caching(payload) + assert payload["messages"][0]["content"][0].get("cache_control") == {"type": "ephemeral"} + + +def test_string_message_content_converted_and_cached(model): + payload: dict = { + "messages": [ + {"role": "user", "content": "simple string"}, + ], + } + model._apply_prompt_caching(payload) + assert isinstance(payload["messages"][0]["content"], list) + assert payload["messages"][0]["content"][0].get("cache_control") == {"type": "ephemeral"} + + +# --------------------------------------------------------------------------- +# Budget enforcement (the core regression test for issue #2448) +# --------------------------------------------------------------------------- + + +def test_never_exceeds_4_breakpoints_with_large_system(model): + """Many system text blocks must not produce more than 4 breakpoints total.""" + payload: dict = { + "system": [{"type": "text", "text": f"sys {i}"} for i in range(6)], + "tools": [{"name": "t1"}], + } + model._apply_prompt_caching(payload) + assert _count_cache_control(payload) <= 4 + + +def test_never_exceeds_4_breakpoints_multi_turn_with_multi_block_messages(model): + """Multi-turn conversation where each message has multiple content blocks.""" + # 1 system block + 3 messages × 2 blocks + 1 tool = 8 candidates → must cap at 4 + payload: dict = { + "system": [{"type": "text", "text": "system prompt"}], + "messages": [ + { + "role": "user", + "content": [ + {"type": "text", "text": "user text"}, + {"type": "tool_result", "tool_use_id": "x", "content": "result"}, + ], + }, + { + "role": "assistant", + "content": [ + {"type": "text", "text": "assistant text"}, + {"type": "tool_use", "id": "y", "name": "bash", "input": {}}, + ], + }, + { + "role": "user", + "content": [ + {"type": "text", "text": "follow up"}, + {"type": "text", "text": "second block"}, + ], + }, + ], + "tools": [{"name": "bash"}], + } + model._apply_prompt_caching(payload) + total = _count_cache_control(payload) + assert total <= 4, f"Expected ≤ 4 breakpoints, got {total}" + + +def test_never_exceeds_4_breakpoints_many_messages(model): + """Large number of messages with multiple blocks per message.""" + messages = [] + for i in range(10): + messages.append( + { + "role": "user", + "content": [ + {"type": "text", "text": f"msg {i} block a"}, + {"type": "text", "text": f"msg {i} block b"}, + ], + } + ) + payload: dict = { + "system": [{"type": "text", "text": "sys 1"}, {"type": "text", "text": "sys 2"}], + "messages": messages, + "tools": [{"name": "tool_a"}, {"name": "tool_b"}], + } + model._apply_prompt_caching(payload) + total = _count_cache_control(payload) + assert total <= 4, f"Expected ≤ 4 breakpoints, got {total}" + + +def test_exactly_4_breakpoints_when_4_or_more_candidates(model): + """When there are at least 4 candidates, exactly 4 breakpoints are placed.""" + payload: dict = { + "system": [{"type": "text", "text": f"sys {i}"} for i in range(3)], + "messages": [ + {"role": "user", "content": [{"type": "text", "text": "user"}]}, + {"role": "assistant", "content": [{"type": "text", "text": "asst"}]}, + {"role": "user", "content": [{"type": "text", "text": "follow"}]}, + ], + "tools": [{"name": 
"bash"}], + } + model._apply_prompt_caching(payload) + total = _count_cache_control(payload) + assert total == 4 + + +def test_breakpoints_placed_on_last_candidates(model): + """Breakpoints should be on the *last* candidates, not the first.""" + # 5 system blocks but budget = 4 → first system block should NOT be cached, + # last 4 (indices 1-4) should be. + payload: dict = { + "system": [{"type": "text", "text": f"sys {i}"} for i in range(5)], + } + model._apply_prompt_caching(payload) + # First block is NOT in the last-4 window + assert "cache_control" not in payload["system"][0] + # Last 4 blocks ARE cached + for i in range(1, 5): + assert payload["system"][i].get("cache_control") == {"type": "ephemeral"}, f"block {i} should be cached" + + +# --------------------------------------------------------------------------- +# Edge cases +# --------------------------------------------------------------------------- + + +def test_no_candidates_is_a_no_op(model): + payload: dict = {} + model._apply_prompt_caching(payload) + assert _count_cache_control(payload) == 0 + + +def test_non_text_system_blocks_not_added_as_candidates(model): + """Image blocks in system should not receive cache_control.""" + payload: dict = { + "system": [ + {"type": "image", "source": {"type": "base64", "media_type": "image/png", "data": "abc"}}, + {"type": "text", "text": "text block"}, + ], + } + model._apply_prompt_caching(payload) + assert "cache_control" not in payload["system"][0] + assert payload["system"][1].get("cache_control") == {"type": "ephemeral"} + + +def test_old_messages_outside_cache_window_not_cached(model): + """Messages older than prompt_cache_size should not be cached.""" + m = _make_model(prompt_cache_size=1) + payload: dict = { + "messages": [ + {"role": "user", "content": [{"type": "text", "text": "old message"}]}, + {"role": "user", "content": [{"type": "text", "text": "recent message"}]}, + ], + } + m._apply_prompt_caching(payload) + # Only the last message should be within the cache window + assert "cache_control" not in payload["messages"][0]["content"][0] + assert payload["messages"][1]["content"][0].get("cache_control") == {"type": "ephemeral"} diff --git a/backend/tests/test_client.py b/backend/tests/test_client.py index 14b52d077..8397af163 100644 --- a/backend/tests/test_client.py +++ b/backend/tests/test_client.py @@ -43,12 +43,27 @@ def mock_app_config(): @pytest.fixture -def client(mock_app_config): +def client(mock_app_config, tmp_path): """Create a DeerFlowClient with mocked config loading.""" + import deerflow.skills.storage as _storage_mod + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + + _storage_mod._default_skill_storage = LocalSkillStorage(host_path=str(tmp_path)) with patch("deerflow.client.get_app_config", return_value=mock_app_config): return DeerFlowClient() +@pytest.fixture +def allow_skill_security_scan(): + async def _scan(*args, **kwargs): + from deerflow.skills.security_scanner import ScanResult + + return ScanResult(decision="allow", reason="ok") + + with patch("deerflow.skills.installer.scan_skill_content", _scan): + yield + + # --------------------------------------------------------------------------- # __init__ # --------------------------------------------------------------------------- @@ -124,7 +139,7 @@ class TestConfigQueries: skill.category = "public" skill.enabled = True - with patch("deerflow.skills.loader.load_skills", return_value=[skill]) as mock_load: + with 
patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", return_value=[skill]) as mock_load: result = client.list_skills() mock_load.assert_called_once_with(enabled_only=False) @@ -139,7 +154,7 @@ class TestConfigQueries: } def test_list_skills_enabled_only(self, client): - with patch("deerflow.skills.loader.load_skills", return_value=[]) as mock_load: + with patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", return_value=[]) as mock_load: client.list_skills(enabled_only=True) mock_load.assert_called_once_with(enabled_only=True) @@ -819,7 +834,7 @@ class TestEnsureAgent: patch("deerflow.client._build_middlewares", return_value=[]) as mock_build_middlewares, patch("deerflow.client.apply_prompt_template", return_value="prompt") as mock_apply_prompt, patch.object(client, "_get_tools", return_value=[]), - patch("deerflow.agents.checkpointer.get_checkpointer", return_value=MagicMock()), + patch("deerflow.runtime.checkpointer.get_checkpointer", return_value=MagicMock()), ): client._agent_name = "custom-agent" client._available_skills = {"test_skill"} @@ -844,7 +859,7 @@ class TestEnsureAgent: patch("deerflow.client._build_middlewares", return_value=[]), patch("deerflow.client.apply_prompt_template", return_value="prompt"), patch.object(client, "_get_tools", return_value=[]), - patch("deerflow.agents.checkpointer.get_checkpointer", return_value=mock_checkpointer), + patch("deerflow.runtime.checkpointer.get_checkpointer", return_value=mock_checkpointer), ): client._ensure_agent(config) @@ -869,7 +884,7 @@ class TestEnsureAgent: patch("deerflow.client._build_middlewares", side_effect=fake_build_middlewares), patch("deerflow.client.apply_prompt_template", return_value="prompt"), patch.object(client, "_get_tools", return_value=[]), - patch("deerflow.agents.checkpointer.get_checkpointer", return_value=MagicMock()), + patch("deerflow.runtime.checkpointer.get_checkpointer", return_value=MagicMock()), ): client._ensure_agent(config) @@ -888,7 +903,7 @@ class TestEnsureAgent: patch("deerflow.client._build_middlewares", return_value=[]), patch("deerflow.client.apply_prompt_template", return_value="prompt"), patch.object(client, "_get_tools", return_value=[]), - patch("deerflow.agents.checkpointer.get_checkpointer", return_value=None), + patch("deerflow.runtime.checkpointer.get_checkpointer", return_value=None), ): client._ensure_agent(config) @@ -1017,7 +1032,7 @@ class TestThreadQueries: mock_checkpointer = MagicMock() mock_checkpointer.list.return_value = [] - with patch("deerflow.agents.checkpointer.provider.get_checkpointer", return_value=mock_checkpointer): + with patch("deerflow.runtime.checkpointer.provider.get_checkpointer", return_value=mock_checkpointer): # No internal checkpointer, should fetch from provider result = client.list_threads() @@ -1071,7 +1086,7 @@ class TestThreadQueries: mock_checkpointer = MagicMock() mock_checkpointer.list.return_value = [] - with patch("deerflow.agents.checkpointer.provider.get_checkpointer", return_value=mock_checkpointer): + with patch("deerflow.runtime.checkpointer.provider.get_checkpointer", return_value=mock_checkpointer): result = client.get_thread("t99") assert result["thread_id"] == "t99" @@ -1152,13 +1167,13 @@ class TestSkillsManagement: def test_get_skill_found(self, client): skill = self._make_skill() - with patch("deerflow.skills.loader.load_skills", return_value=[skill]): + with patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", return_value=[skill]): result = 
client.get_skill("test-skill") assert result is not None assert result["name"] == "test-skill" def test_get_skill_not_found(self, client): - with patch("deerflow.skills.loader.load_skills", return_value=[]): + with patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", return_value=[]): result = client.get_skill("nonexistent") assert result is None @@ -1179,7 +1194,7 @@ class TestSkillsManagement: client._agent = MagicMock() with ( - patch("deerflow.skills.loader.load_skills", side_effect=[[skill], [updated_skill]]), + patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", side_effect=[[skill], [updated_skill]]), patch("deerflow.client.ExtensionsConfig.resolve_config_path", return_value=tmp_path), patch("deerflow.client.get_extensions_config", return_value=ext_config), patch("deerflow.client.reload_extensions_config"), @@ -1191,11 +1206,11 @@ class TestSkillsManagement: tmp_path.unlink() def test_update_skill_not_found(self, client): - with patch("deerflow.skills.loader.load_skills", return_value=[]): + with patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", return_value=[]): with pytest.raises(ValueError, match="not found"): client.update_skill("nonexistent", enabled=True) - def test_install_skill(self, client): + def test_install_skill(self, client, allow_skill_security_scan): with tempfile.TemporaryDirectory() as tmp: tmp_path = Path(tmp) @@ -1211,7 +1226,9 @@ class TestSkillsManagement: skills_root = tmp_path / "skills" (skills_root / "custom").mkdir(parents=True) - with patch("deerflow.skills.installer.get_skills_root_path", return_value=skills_root): + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + + with patch("deerflow.skills.storage._default_skill_storage", LocalSkillStorage(host_path=str(skills_root))): result = client.install_skill(archive_path) assert result["success"] is True @@ -1243,7 +1260,10 @@ class TestMemoryManagement: with patch("deerflow.agents.memory.updater.import_memory_data", return_value=imported) as mock_import: result = client.import_memory(imported) - mock_import.assert_called_once_with(imported) + assert mock_import.call_count == 1 + call_args = mock_import.call_args + assert call_args.args == (imported,) + assert "user_id" in call_args.kwargs assert result == imported def test_reload_memory(self, client): @@ -1489,9 +1509,12 @@ class TestUploads: class TestArtifacts: def test_get_artifact(self, client): + from deerflow.runtime.user_context import get_effective_user_id + with tempfile.TemporaryDirectory() as tmp: paths = Paths(base_dir=tmp) - outputs = paths.sandbox_outputs_dir("t1") + user_id = get_effective_user_id() + outputs = paths.sandbox_outputs_dir("t1", user_id=user_id) outputs.mkdir(parents=True) (outputs / "result.txt").write_text("artifact content") @@ -1502,9 +1525,12 @@ class TestArtifacts: assert "text" in mime def test_get_artifact_not_found(self, client): + from deerflow.runtime.user_context import get_effective_user_id + with tempfile.TemporaryDirectory() as tmp: paths = Paths(base_dir=tmp) - paths.sandbox_user_data_dir("t1").mkdir(parents=True) + user_id = get_effective_user_id() + paths.sandbox_outputs_dir("t1", user_id=user_id).mkdir(parents=True) with patch("deerflow.client.get_paths", return_value=paths): with pytest.raises(FileNotFoundError): @@ -1515,9 +1541,12 @@ class TestArtifacts: client.get_artifact("t1", "bad/path/file.txt") def test_get_artifact_path_traversal(self, client): + from deerflow.runtime.user_context 
import get_effective_user_id + with tempfile.TemporaryDirectory() as tmp: paths = Paths(base_dir=tmp) - paths.sandbox_user_data_dir("t1").mkdir(parents=True) + user_id = get_effective_user_id() + paths.sandbox_outputs_dir("t1", user_id=user_id).mkdir(parents=True) with patch("deerflow.client.get_paths", return_value=paths): with pytest.raises(PathTraversalError): @@ -1701,13 +1730,16 @@ class TestScenarioFileLifecycle: def test_upload_then_read_artifact(self, client): """Upload a file, simulate agent producing artifact, read it back.""" + from deerflow.runtime.user_context import get_effective_user_id + with tempfile.TemporaryDirectory() as tmp: tmp_path = Path(tmp) uploads_dir = tmp_path / "uploads" uploads_dir.mkdir() paths = Paths(base_dir=tmp_path) - outputs_dir = paths.sandbox_outputs_dir("t-artifact") + user_id = get_effective_user_id() + outputs_dir = paths.sandbox_outputs_dir("t-artifact", user_id=user_id) outputs_dir.mkdir(parents=True) # Upload phase @@ -1759,12 +1791,12 @@ class TestScenarioConfigManagement: skill.category = "public" skill.enabled = True - with patch("deerflow.skills.loader.load_skills", return_value=[skill]): + with patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", return_value=[skill]): skills_result = client.list_skills() assert len(skills_result["skills"]) == 1 # Get specific skill - with patch("deerflow.skills.loader.load_skills", return_value=[skill]): + with patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", return_value=[skill]): detail = client.get_skill("web-search") assert detail is not None assert detail["enabled"] is True @@ -1815,7 +1847,7 @@ class TestScenarioConfigManagement: client._agent = MagicMock() # Simulate re-created agent with ( - patch("deerflow.skills.loader.load_skills", side_effect=[[skill], [toggled]]), + patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", side_effect=[[skill], [toggled]]), patch("deerflow.client.ExtensionsConfig.resolve_config_path", return_value=config_file), patch("deerflow.client.get_extensions_config", return_value=ext_config), patch("deerflow.client.reload_extensions_config"), @@ -1846,7 +1878,7 @@ class TestScenarioAgentRecreation: patch("deerflow.client._build_middlewares", return_value=[]), patch("deerflow.client.apply_prompt_template", return_value="prompt"), patch.object(client, "_get_tools", return_value=[]), - patch("deerflow.agents.checkpointer.get_checkpointer", return_value=MagicMock()), + patch("deerflow.runtime.checkpointer.get_checkpointer", return_value=MagicMock()), ): client._ensure_agent(config_a) first_agent = client._agent @@ -1874,7 +1906,7 @@ class TestScenarioAgentRecreation: patch("deerflow.client._build_middlewares", return_value=[]), patch("deerflow.client.apply_prompt_template", return_value="prompt"), patch.object(client, "_get_tools", return_value=[]), - patch("deerflow.agents.checkpointer.get_checkpointer", return_value=MagicMock()), + patch("deerflow.runtime.checkpointer.get_checkpointer", return_value=MagicMock()), ): client._ensure_agent(config) client._ensure_agent(config) @@ -1899,7 +1931,7 @@ class TestScenarioAgentRecreation: patch("deerflow.client._build_middlewares", return_value=[]), patch("deerflow.client.apply_prompt_template", return_value="prompt"), patch.object(client, "_get_tools", return_value=[]), - patch("deerflow.agents.checkpointer.get_checkpointer", return_value=MagicMock()), + patch("deerflow.runtime.checkpointer.get_checkpointer", return_value=MagicMock()), ): 
client._ensure_agent(config) client.reset_agent() @@ -1957,11 +1989,14 @@ class TestScenarioThreadIsolation: def test_artifacts_isolated_per_thread(self, client): """Artifacts in thread-A are not accessible from thread-B.""" + from deerflow.runtime.user_context import get_effective_user_id + with tempfile.TemporaryDirectory() as tmp: paths = Paths(base_dir=tmp) - outputs_a = paths.sandbox_outputs_dir("thread-a") + user_id = get_effective_user_id() + outputs_a = paths.sandbox_outputs_dir("thread-a", user_id=user_id) outputs_a.mkdir(parents=True) - paths.sandbox_user_data_dir("thread-b").mkdir(parents=True) + paths.sandbox_outputs_dir("thread-b", user_id=user_id).mkdir(parents=True) (outputs_a / "result.txt").write_text("thread-a artifact") with patch("deerflow.client.get_paths", return_value=paths): @@ -2015,7 +2050,7 @@ class TestScenarioMemoryWorkflow: class TestScenarioSkillInstallAndUse: """Scenario: Install a skill → verify it appears → toggle it.""" - def test_install_then_toggle(self, client): + def test_install_then_toggle(self, client, allow_skill_security_scan): """Install .skill archive → list to verify → disable → verify disabled.""" with tempfile.TemporaryDirectory() as tmp: tmp_path = Path(tmp) @@ -2032,7 +2067,9 @@ class TestScenarioSkillInstallAndUse: (skills_root / "custom").mkdir(parents=True) # Step 1: Install - with patch("deerflow.skills.installer.get_skills_root_path", return_value=skills_root): + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + + with patch("deerflow.skills.storage._default_skill_storage", LocalSkillStorage(host_path=str(skills_root))): result = client.install_skill(archive) assert result["success"] is True assert (skills_root / "custom" / "my-analyzer" / "SKILL.md").exists() @@ -2045,7 +2082,7 @@ class TestScenarioSkillInstallAndUse: installed_skill.category = "custom" installed_skill.enabled = True - with patch("deerflow.skills.loader.load_skills", return_value=[installed_skill]): + with patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", return_value=[installed_skill]): skills_result = client.list_skills() assert any(s["name"] == "my-analyzer" for s in skills_result["skills"]) @@ -2065,7 +2102,7 @@ class TestScenarioSkillInstallAndUse: config_file.write_text("{}") with ( - patch("deerflow.skills.loader.load_skills", side_effect=[[installed_skill], [disabled_skill]]), + patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", side_effect=[[installed_skill], [disabled_skill]]), patch("deerflow.client.ExtensionsConfig.resolve_config_path", return_value=config_file), patch("deerflow.client.get_extensions_config", return_value=ext_config), patch("deerflow.client.reload_extensions_config"), @@ -2239,7 +2276,7 @@ class TestGatewayConformance: skill.category = "public" skill.enabled = True - with patch("deerflow.skills.loader.load_skills", return_value=[skill]): + with patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", return_value=[skill]): result = client.list_skills() parsed = SkillsListResponse(**result) @@ -2254,14 +2291,14 @@ class TestGatewayConformance: skill.category = "public" skill.enabled = True - with patch("deerflow.skills.loader.load_skills", return_value=[skill]): + with patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", return_value=[skill]): result = client.get_skill("web-search") assert result is not None parsed = SkillResponse(**result) assert parsed.name == "web-search" - def 
test_install_skill(self, client, tmp_path): + def test_install_skill(self, client, tmp_path, allow_skill_security_scan): skill_dir = tmp_path / "my-skill" skill_dir.mkdir() (skill_dir / "SKILL.md").write_text("---\nname: my-skill\ndescription: A test skill\n---\nBody\n") @@ -2270,7 +2307,9 @@ class TestGatewayConformance: with zipfile.ZipFile(archive, "w") as zf: zf.write(skill_dir / "SKILL.md", "my-skill/SKILL.md") - with patch("deerflow.skills.installer.get_skills_root_path", return_value=tmp_path): + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + + with patch("deerflow.skills.storage._default_skill_storage", LocalSkillStorage(host_path=str(tmp_path))): result = client.install_skill(archive) parsed = SkillInstallResponse(**result) @@ -2424,8 +2463,10 @@ class TestInstallSkillSecurity: def patched_extract(zf, dest, max_total_size=100): return orig(zf, dest, max_total_size=100) + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + with ( - patch("deerflow.skills.installer.get_skills_root_path", return_value=skills_root), + patch("deerflow.skills.storage._default_skill_storage", LocalSkillStorage(host_path=str(skills_root))), patch("deerflow.skills.installer.safe_extract_skill_archive", side_effect=patched_extract), ): with pytest.raises(ValueError, match="too large"): @@ -2441,7 +2482,9 @@ class TestInstallSkillSecurity: skills_root = Path(tmp) / "skills" (skills_root / "custom").mkdir(parents=True) - with patch("deerflow.skills.installer.get_skills_root_path", return_value=skills_root): + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + + with patch("deerflow.skills.storage._default_skill_storage", LocalSkillStorage(host_path=str(skills_root))): with pytest.raises(ValueError, match="unsafe"): client.install_skill(archive) @@ -2455,11 +2498,13 @@ class TestInstallSkillSecurity: skills_root = Path(tmp) / "skills" (skills_root / "custom").mkdir(parents=True) - with patch("deerflow.skills.installer.get_skills_root_path", return_value=skills_root): + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + + with patch("deerflow.skills.storage._default_skill_storage", LocalSkillStorage(host_path=str(skills_root))): with pytest.raises(ValueError, match="unsafe"): client.install_skill(archive) - def test_symlinks_skipped_during_extraction(self, client): + def test_symlinks_skipped_during_extraction(self, client, allow_skill_security_scan): """Symlink entries in the archive are skipped (never written to disk).""" import stat as stat_mod @@ -2477,7 +2522,9 @@ class TestInstallSkillSecurity: skills_root = tmp_path / "skills" (skills_root / "custom").mkdir(parents=True) - with patch("deerflow.skills.installer.get_skills_root_path", return_value=skills_root): + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + + with patch("deerflow.skills.storage._default_skill_storage", LocalSkillStorage(host_path=str(skills_root))): result = client.install_skill(archive) assert result["success"] is True @@ -2501,9 +2548,11 @@ class TestInstallSkillSecurity: skills_root = tmp_path / "skills" (skills_root / "custom").mkdir(parents=True) + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + with ( - patch("deerflow.skills.installer.get_skills_root_path", return_value=skills_root), - patch("deerflow.skills.installer._validate_skill_frontmatter", return_value=(True, "OK", "../evil")), + patch("deerflow.skills.storage._default_skill_storage", 
LocalSkillStorage(host_path=str(skills_root))), + patch("deerflow.skills.validation._validate_skill_frontmatter", return_value=(True, "OK", "../evil")), ): with pytest.raises(ValueError, match="Invalid skill name"): client.install_skill(archive) @@ -2524,9 +2573,11 @@ class TestInstallSkillSecurity: skills_root = tmp_path / "skills" (skills_root / "custom" / "dupe-skill").mkdir(parents=True) + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + with ( - patch("deerflow.skills.installer.get_skills_root_path", return_value=skills_root), - patch("deerflow.skills.installer._validate_skill_frontmatter", return_value=(True, "OK", "dupe-skill")), + patch("deerflow.skills.storage._default_skill_storage", LocalSkillStorage(host_path=str(skills_root))), + patch("deerflow.skills.validation._validate_skill_frontmatter", return_value=(True, "OK", "dupe-skill")), ): with pytest.raises(ValueError, match="already exists"): client.install_skill(archive) @@ -2541,7 +2592,9 @@ class TestInstallSkillSecurity: skills_root = Path(tmp) / "skills" (skills_root / "custom").mkdir(parents=True) - with patch("deerflow.skills.installer.get_skills_root_path", return_value=skills_root): + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + + with patch("deerflow.skills.storage._default_skill_storage", LocalSkillStorage(host_path=str(skills_root))): with pytest.raises(ValueError, match="empty"): client.install_skill(archive) @@ -2560,9 +2613,11 @@ class TestInstallSkillSecurity: skills_root = tmp_path / "skills" (skills_root / "custom").mkdir(parents=True) + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + with ( - patch("deerflow.skills.installer.get_skills_root_path", return_value=skills_root), - patch("deerflow.skills.installer._validate_skill_frontmatter", return_value=(False, "Missing name field", "")), + patch("deerflow.skills.storage._default_skill_storage", LocalSkillStorage(host_path=str(skills_root))), + patch("deerflow.skills.validation._validate_skill_frontmatter", return_value=(False, "Missing name field", "")), ): with pytest.raises(ValueError, match="Invalid skill"): client.install_skill(archive) @@ -2654,7 +2709,7 @@ class TestConfigUpdateErrors: skill.name = "some-skill" with ( - patch("deerflow.skills.loader.load_skills", return_value=[skill]), + patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", return_value=[skill]), patch("deerflow.client.ExtensionsConfig.resolve_config_path", return_value=None), ): with pytest.raises(FileNotFoundError, match="Cannot locate"): @@ -2674,7 +2729,7 @@ class TestConfigUpdateErrors: config_file.write_text("{}") with ( - patch("deerflow.skills.loader.load_skills", side_effect=[[skill], []]), + patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", side_effect=[[skill], []]), patch("deerflow.client.ExtensionsConfig.resolve_config_path", return_value=config_file), patch("deerflow.client.get_extensions_config", return_value=ext_config), patch("deerflow.client.reload_extensions_config"), @@ -2869,9 +2924,12 @@ class TestUploadDeleteSymlink: class TestArtifactHardening: def test_artifact_directory_rejected(self, client): """get_artifact rejects paths that resolve to a directory.""" + from deerflow.runtime.user_context import get_effective_user_id + with tempfile.TemporaryDirectory() as tmp: paths = Paths(base_dir=tmp) - subdir = paths.sandbox_outputs_dir("t1") / "subdir" + user_id = get_effective_user_id() + subdir = 
paths.sandbox_outputs_dir("t1", user_id=user_id) / "subdir" subdir.mkdir(parents=True) with patch("deerflow.client.get_paths", return_value=paths): @@ -2880,9 +2938,12 @@ class TestArtifactHardening: def test_artifact_leading_slash_stripped(self, client): """Paths with leading slash are handled correctly.""" + from deerflow.runtime.user_context import get_effective_user_id + with tempfile.TemporaryDirectory() as tmp: paths = Paths(base_dir=tmp) - outputs = paths.sandbox_outputs_dir("t1") + user_id = get_effective_user_id() + outputs = paths.sandbox_outputs_dir("t1", user_id=user_id) outputs.mkdir(parents=True) (outputs / "file.txt").write_text("content") @@ -2996,9 +3057,12 @@ class TestBugArtifactPrefixMatchTooLoose: def test_exact_prefix_without_subpath_accepted(self, client): """Bare 'mnt/user-data' is accepted (will later fail as directory, not at prefix).""" + from deerflow.runtime.user_context import get_effective_user_id + with tempfile.TemporaryDirectory() as tmp: paths = Paths(base_dir=tmp) - paths.sandbox_user_data_dir("t1").mkdir(parents=True) + user_id = get_effective_user_id() + paths.sandbox_outputs_dir("t1", user_id=user_id).mkdir(parents=True) with patch("deerflow.client.get_paths", return_value=paths): # Accepted at prefix check, but fails because it's a directory. @@ -3080,7 +3144,7 @@ class TestBugAgentInvalidationInconsistency: config_file.write_text("{}") with ( - patch("deerflow.skills.loader.load_skills", side_effect=[[skill], [updated]]), + patch("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.load_skills", side_effect=[[skill], [updated]]), patch("deerflow.client.ExtensionsConfig.resolve_config_path", return_value=config_file), patch("deerflow.client.get_extensions_config", return_value=ext_config), patch("deerflow.client.reload_extensions_config"), diff --git a/backend/tests/test_client_e2e.py b/backend/tests/test_client_e2e.py index b26e5bff1..0c3872e41 100644 --- a/backend/tests/test_client_e2e.py +++ b/backend/tests/test_client_e2e.py @@ -23,8 +23,6 @@ from dotenv import load_dotenv from deerflow.client import DeerFlowClient, StreamEvent from deerflow.config.app_config import AppConfig -from deerflow.config.model_config import ModelConfig -from deerflow.config.sandbox_config import SandboxConfig # Load .env from project root (for OPENAI_API_KEY etc.) load_dotenv(os.path.join(os.path.dirname(__file__), "../../.env")) @@ -55,24 +53,34 @@ def _make_e2e_config() -> AppConfig: - ``E2E_MODEL_ID`` (default: ``ep-20251211175242-llcmh``) - ``E2E_BASE_URL`` (default: ``https://ark-cn-beijing.bytedance.net/api/v3``) - ``OPENAI_API_KEY`` (required for LLM tests) + + Note: We use model_validate with a raw dict (not AppConfig(models=[ModelConfig(...)])) + because passing already-validated Pydantic instances triggers a pydantic-core + shortcut that returns stale cached data when another AppConfig was previously + loaded from disk in the same process. Dict-based validation is always correct. 
""" - return AppConfig( - models=[ - ModelConfig( - name=os.getenv("E2E_MODEL_NAME", "volcengine-ark"), - display_name="E2E Test Model", - use=os.getenv("E2E_MODEL_USE", "langchain_openai:ChatOpenAI"), - model=os.getenv("E2E_MODEL_ID", "ep-20251211175242-llcmh"), - base_url=os.getenv("E2E_BASE_URL", "https://ark-cn-beijing.bytedance.net/api/v3"), - api_key=os.getenv("OPENAI_API_KEY", ""), - max_tokens=512, - temperature=0.7, - supports_thinking=False, - supports_reasoning_effort=False, - supports_vision=False, - ) - ], - sandbox=SandboxConfig(use="deerflow.sandbox.local:LocalSandboxProvider", allow_host_bash=True), + return AppConfig.model_validate( + { + "models": [ + { + "name": os.getenv("E2E_MODEL_NAME", "volcengine-ark"), + "display_name": "E2E Test Model", + "use": os.getenv("E2E_MODEL_USE", "langchain_openai:ChatOpenAI"), + "model": os.getenv("E2E_MODEL_ID", "ep-20251211175242-llcmh"), + "base_url": os.getenv("E2E_BASE_URL", "https://ark-cn-beijing.bytedance.net/api/v3"), + "api_key": os.getenv("OPENAI_API_KEY", ""), + "max_tokens": 512, + "temperature": 0.7, + "supports_thinking": False, + "supports_reasoning_effort": False, + "supports_vision": False, + } + ], + "sandbox": { + "use": "deerflow.sandbox.local:LocalSandboxProvider", + "allow_host_bash": True, + }, + } ) @@ -95,10 +103,16 @@ def e2e_env(tmp_path, monkeypatch): monkeypatch.setattr("deerflow.config.paths._paths", None) monkeypatch.setattr("deerflow.sandbox.sandbox_provider._default_sandbox_provider", None) - # 2. Inject a clean AppConfig via the global singleton. + # 2. Inject a clean AppConfig. We must reset _app_config to None BEFORE + # calling _make_e2e_config() because AppConfig() constructor misbehaves when + # a disk config is already cached: it returns the cached model list instead + # of the provided one. Clearing first ensures the test config is correct. + monkeypatch.setattr("deerflow.config.app_config._app_config", None) + monkeypatch.setattr("deerflow.config.app_config._app_config_is_custom", False) config = _make_e2e_config() monkeypatch.setattr("deerflow.config.app_config._app_config", config) monkeypatch.setattr("deerflow.config.app_config._app_config_is_custom", True) + monkeypatch.setattr("deerflow.client.get_app_config", lambda: config) # 3. 
Disable title generation (extra LLM call, non-deterministic) from deerflow.config.title_config import TitleConfig @@ -262,8 +276,9 @@ class TestFileUploadIntegration: # Physically exists from deerflow.config.paths import get_paths + from deerflow.runtime.user_context import get_effective_user_id - assert (get_paths().sandbox_uploads_dir(tid) / "readme.txt").exists() + assert (get_paths().sandbox_uploads_dir(tid, user_id=get_effective_user_id()) / "readme.txt").exists() def test_upload_duplicate_rename(self, e2e_env, tmp_path): """Uploading two files with the same name auto-renames the second.""" @@ -472,12 +487,13 @@ class TestArtifactAccess: def test_get_artifact_happy_path(self, e2e_env): """Write a file to outputs, then read it back via get_artifact().""" from deerflow.config.paths import get_paths + from deerflow.runtime.user_context import get_effective_user_id c = DeerFlowClient(checkpointer=None, thinking_enabled=False) tid = str(uuid.uuid4()) # Create an output file in the thread's outputs directory - outputs_dir = get_paths().sandbox_outputs_dir(tid) + outputs_dir = get_paths().sandbox_outputs_dir(tid, user_id=get_effective_user_id()) outputs_dir.mkdir(parents=True, exist_ok=True) (outputs_dir / "result.txt").write_text("hello artifact") @@ -488,11 +504,12 @@ class TestArtifactAccess: def test_get_artifact_nested_path(self, e2e_env): """Artifacts in subdirectories are accessible.""" from deerflow.config.paths import get_paths + from deerflow.runtime.user_context import get_effective_user_id c = DeerFlowClient(checkpointer=None, thinking_enabled=False) tid = str(uuid.uuid4()) - outputs_dir = get_paths().sandbox_outputs_dir(tid) + outputs_dir = get_paths().sandbox_outputs_dir(tid, user_id=get_effective_user_id()) sub = outputs_dir / "charts" sub.mkdir(parents=True, exist_ok=True) (sub / "data.json").write_text('{"x": 1}') @@ -522,15 +539,26 @@ class TestArtifactAccess: class TestSkillInstallation: """install_skill() with real ZIP handling and filesystem.""" + @pytest.fixture(autouse=True) + def _allow_skill_security_scan(self, monkeypatch): + async def _scan(*args, **kwargs): + from deerflow.skills.security_scanner import ScanResult + + return ScanResult(decision="allow", reason="ok") + + monkeypatch.setattr("deerflow.skills.installer.scan_skill_content", _scan) + @pytest.fixture(autouse=True) def _isolate_skills_dir(self, tmp_path, monkeypatch): """Redirect skill installation to a temp directory.""" skills_root = tmp_path / "skills" (skills_root / "public").mkdir(parents=True) (skills_root / "custom").mkdir(parents=True) + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + monkeypatch.setattr( - "deerflow.skills.installer.get_skills_root_path", - lambda: skills_root, + "deerflow.skills.storage._default_skill_storage", + LocalSkillStorage(host_path=str(skills_root)), ) self._skills_root = skills_root @@ -605,19 +633,21 @@ class TestConfigManagement: def test_list_models_returns_injected_config(self, e2e_env): """list_models() returns the model from the injected AppConfig.""" + expected_model_name = os.getenv("E2E_MODEL_NAME", "volcengine-ark") c = DeerFlowClient(checkpointer=None, thinking_enabled=False) result = c.list_models() assert "models" in result assert len(result["models"]) == 1 - assert result["models"][0]["name"] == "volcengine-ark" + assert result["models"][0]["name"] == expected_model_name assert result["models"][0]["display_name"] == "E2E Test Model" def test_get_model_found(self, e2e_env): """get_model() returns the model when it exists.""" + 
expected_model_name = os.getenv("E2E_MODEL_NAME", "volcengine-ark") c = DeerFlowClient(checkpointer=None, thinking_enabled=False) - model = c.get_model("volcengine-ark") + model = c.get_model(expected_model_name) assert model is not None - assert model["name"] == "volcengine-ark" + assert model["name"] == expected_model_name assert model["supports_thinking"] is False def test_get_model_not_found(self, e2e_env): diff --git a/backend/tests/test_converters.py b/backend/tests/test_converters.py new file mode 100644 index 000000000..2c2167e01 --- /dev/null +++ b/backend/tests/test_converters.py @@ -0,0 +1,188 @@ +"""Tests for LangChain-to-OpenAI message format converters.""" + +from __future__ import annotations + +import json +from unittest.mock import MagicMock + +from deerflow.runtime.converters import ( + langchain_messages_to_openai, + langchain_to_openai_completion, + langchain_to_openai_message, +) + + +def _make_ai_message(content="", tool_calls=None, id="msg-123", usage_metadata=None, response_metadata=None): + msg = MagicMock() + msg.type = "ai" + msg.content = content + msg.tool_calls = tool_calls or [] + msg.id = id + msg.usage_metadata = usage_metadata + msg.response_metadata = response_metadata or {} + return msg + + +def _make_human_message(content="Hello"): + msg = MagicMock() + msg.type = "human" + msg.content = content + return msg + + +def _make_system_message(content="You are an assistant."): + msg = MagicMock() + msg.type = "system" + msg.content = content + return msg + + +def _make_tool_message(content="result", tool_call_id="call-abc"): + msg = MagicMock() + msg.type = "tool" + msg.content = content + msg.tool_call_id = tool_call_id + return msg + + +class TestLangchainToOpenaiMessage: + def test_ai_message_text_only(self): + msg = _make_ai_message(content="Hello world") + result = langchain_to_openai_message(msg) + assert result["role"] == "assistant" + assert result["content"] == "Hello world" + assert "tool_calls" not in result + + def test_ai_message_with_tool_calls(self): + tool_calls = [ + {"id": "call-1", "name": "bash", "args": {"command": "ls"}}, + ] + msg = _make_ai_message(content="", tool_calls=tool_calls) + result = langchain_to_openai_message(msg) + assert result["role"] == "assistant" + assert result["content"] is None + assert len(result["tool_calls"]) == 1 + tc = result["tool_calls"][0] + assert tc["id"] == "call-1" + assert tc["type"] == "function" + assert tc["function"]["name"] == "bash" + # arguments must be a JSON string + args = json.loads(tc["function"]["arguments"]) + assert args == {"command": "ls"} + + def test_ai_message_text_and_tool_calls(self): + tool_calls = [ + {"id": "call-2", "name": "read_file", "args": {"path": "/tmp/x"}}, + ] + msg = _make_ai_message(content="Reading the file", tool_calls=tool_calls) + result = langchain_to_openai_message(msg) + assert result["role"] == "assistant" + assert result["content"] == "Reading the file" + assert len(result["tool_calls"]) == 1 + + def test_ai_message_empty_content_no_tools(self): + msg = _make_ai_message(content="") + result = langchain_to_openai_message(msg) + assert result["role"] == "assistant" + assert result["content"] == "" + assert "tool_calls" not in result + + def test_ai_message_list_content(self): + # Multimodal content is preserved as-is + list_content = [ + {"type": "text", "text": "Here is an image"}, + {"type": "image_url", "image_url": {"url": "data:image/png;base64,abc"}}, + ] + msg = _make_ai_message(content=list_content) + result = langchain_to_openai_message(msg) + assert 
result["role"] == "assistant" + assert result["content"] == list_content + + def test_human_message(self): + msg = _make_human_message("Tell me a joke") + result = langchain_to_openai_message(msg) + assert result["role"] == "user" + assert result["content"] == "Tell me a joke" + + def test_tool_message(self): + msg = _make_tool_message(content="file contents here", tool_call_id="call-xyz") + result = langchain_to_openai_message(msg) + assert result["role"] == "tool" + assert result["tool_call_id"] == "call-xyz" + assert result["content"] == "file contents here" + + def test_system_message(self): + msg = _make_system_message("You are a helpful assistant.") + result = langchain_to_openai_message(msg) + assert result["role"] == "system" + assert result["content"] == "You are a helpful assistant." + + +class TestLangchainToOpenaiCompletion: + def test_basic_completion(self): + usage_metadata = {"input_tokens": 10, "output_tokens": 20} + msg = _make_ai_message( + content="Hello", + id="msg-abc", + usage_metadata=usage_metadata, + response_metadata={"model_name": "gpt-4o", "finish_reason": "stop"}, + ) + result = langchain_to_openai_completion(msg) + assert result["id"] == "msg-abc" + assert result["model"] == "gpt-4o" + assert len(result["choices"]) == 1 + choice = result["choices"][0] + assert choice["index"] == 0 + assert choice["finish_reason"] == "stop" + assert choice["message"]["role"] == "assistant" + assert choice["message"]["content"] == "Hello" + assert result["usage"] is not None + assert result["usage"]["prompt_tokens"] == 10 + assert result["usage"]["completion_tokens"] == 20 + assert result["usage"]["total_tokens"] == 30 + + def test_completion_with_tool_calls(self): + tool_calls = [{"id": "call-1", "name": "bash", "args": {}}] + msg = _make_ai_message( + content="", + tool_calls=tool_calls, + id="msg-tc", + response_metadata={"model_name": "gpt-4o"}, + ) + result = langchain_to_openai_completion(msg) + assert result["choices"][0]["finish_reason"] == "tool_calls" + + def test_completion_no_usage(self): + msg = _make_ai_message(content="Hi", id="msg-nousage", usage_metadata=None) + result = langchain_to_openai_completion(msg) + assert result["usage"] is None + + def test_finish_reason_from_response_metadata(self): + msg = _make_ai_message( + content="Done", + id="msg-fr", + response_metadata={"model_name": "claude-3", "finish_reason": "end_turn"}, + ) + result = langchain_to_openai_completion(msg) + assert result["choices"][0]["finish_reason"] == "end_turn" + + def test_finish_reason_default_stop(self): + msg = _make_ai_message(content="Done", id="msg-defstop", response_metadata={}) + result = langchain_to_openai_completion(msg) + assert result["choices"][0]["finish_reason"] == "stop" + + +class TestMessagesToOpenai: + def test_convert_message_list(self): + human = _make_human_message("Hi") + ai = _make_ai_message(content="Hello!") + tool_msg = _make_tool_message("result", "call-1") + messages = [human, ai, tool_msg] + result = langchain_messages_to_openai(messages) + assert len(result) == 3 + assert result[0]["role"] == "user" + assert result[1]["role"] == "assistant" + assert result[2]["role"] == "tool" + + def test_empty_list(self): + assert langchain_messages_to_openai([]) == [] diff --git a/backend/tests/test_create_deerflow_agent.py b/backend/tests/test_create_deerflow_agent.py index 03fee2055..fb403ed7f 100644 --- a/backend/tests/test_create_deerflow_agent.py +++ b/backend/tests/test_create_deerflow_agent.py @@ -116,10 +116,22 @@ def test_middleware_and_features_conflict(): 
# --------------------------------------------------------------------------- -# 7. Vision feature auto-injects view_image_tool +# 7. Vision feature auto-injects view_image_tool when thread data is available # --------------------------------------------------------------------------- @patch("deerflow.agents.factory.create_agent") def test_vision_injects_view_image_tool(mock_create_agent): + mock_create_agent.return_value = MagicMock() + feat = RuntimeFeatures(vision=True, sandbox=True) + + create_deerflow_agent(_make_mock_model(), features=feat) + + call_kwargs = mock_create_agent.call_args[1] + tool_names = [t.name for t in call_kwargs["tools"]] + assert "view_image" in tool_names + + +@patch("deerflow.agents.factory.create_agent") +def test_vision_without_sandbox_does_not_inject_view_image_tool(mock_create_agent): mock_create_agent.return_value = MagicMock() feat = RuntimeFeatures(vision=True, sandbox=False) @@ -127,7 +139,7 @@ def test_vision_injects_view_image_tool(mock_create_agent): call_kwargs = mock_create_agent.call_args[1] tool_names = [t.name for t in call_kwargs["tools"]] - assert "view_image" in tool_names + assert "view_image" not in tool_names def test_view_image_middleware_preserves_viewed_images_reducer(): @@ -301,11 +313,11 @@ def test_always_on_error_handling(mock_create_agent): # --------------------------------------------------------------------------- -# 17. Vision with custom middleware still injects tool +# 17. Vision with custom middleware follows thread-data availability # --------------------------------------------------------------------------- @patch("deerflow.agents.factory.create_agent") -def test_vision_custom_middleware_still_injects_tool(mock_create_agent): - """Custom vision middleware still gets the view_image_tool auto-injected.""" +def test_vision_custom_middleware_without_sandbox_does_not_inject_tool(mock_create_agent): + """Custom vision middleware without thread data does not get view_image_tool auto-injected.""" from langchain.agents.middleware import AgentMiddleware mock_create_agent.return_value = MagicMock() @@ -319,7 +331,7 @@ def test_vision_custom_middleware_still_injects_tool(mock_create_agent): call_kwargs = mock_create_agent.call_args[1] tool_names = [t.name for t in call_kwargs["tools"]] - assert "view_image" in tool_names + assert "view_image" not in tool_names # =========================================================================== diff --git a/backend/tests/test_dingtalk_channel.py b/backend/tests/test_dingtalk_channel.py new file mode 100644 index 000000000..235de6db8 --- /dev/null +++ b/backend/tests/test_dingtalk_channel.py @@ -0,0 +1,1554 @@ +"""Tests for the DingTalk channel implementation.""" + +from __future__ import annotations + +import asyncio +from types import SimpleNamespace +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from app.channels.commands import KNOWN_CHANNEL_COMMANDS +from app.channels.dingtalk import ( + _CONVERSATION_TYPE_GROUP, + _CONVERSATION_TYPE_P2P, + DingTalkChannel, + _adapt_markdown_for_dingtalk, + _convert_markdown_table, + _DingTalkMessageHandler, + _extract_text_from_rich_text, + _is_dingtalk_command, + _normalize_allowed_users, + _normalize_conversation_type, +) +from app.channels.message_bus import InboundMessageType, MessageBus, OutboundMessage + + +def _run(coro): + loop = asyncio.new_event_loop() + try: + return loop.run_until_complete(coro) + finally: + loop.close() + + +# --------------------------------------------------------------------------- +# Helper: build 
mock ChatbotMessage +# --------------------------------------------------------------------------- + + +def _make_chatbot_message( + *, + text: str = "hello", + message_type: str = "text", + conversation_type: str | int = _CONVERSATION_TYPE_P2P, + sender_staff_id: str = "user_001", + sender_nick: str = "Test User", + conversation_id: str = "conv_001", + message_id: str = "msg_001", + rich_text_list: list | None = None, +): + """Build a minimal mock object mimicking dingtalk_stream.ChatbotMessage.""" + msg = SimpleNamespace() + msg.message_type = message_type + msg.conversation_type = conversation_type + msg.sender_staff_id = sender_staff_id + msg.sender_nick = sender_nick + msg.conversation_id = conversation_id + msg.message_id = message_id + + if message_type == "text": + msg.text = SimpleNamespace(content=text) + msg.rich_text_content = None + elif message_type == "richText": + msg.text = None + msg.rich_text_content = SimpleNamespace(rich_text_list=rich_text_list or []) + else: + msg.text = None + msg.rich_text_content = None + + return msg + + +# --------------------------------------------------------------------------- +# _DingTalkMessageHandler SDK contract +# --------------------------------------------------------------------------- + + +class TestDingTalkMessageHandlerSdkContract: + def test_pre_start_exists_and_noop(self): + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + handler = _DingTalkMessageHandler(channel) + handler.pre_start() + + def test_raw_process_returns_ack(self): + pytest.importorskip("dingtalk_stream") + + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._on_chatbot_message = MagicMock() + handler = _DingTalkMessageHandler(channel) + cb = MagicMock() + cb.headers.message_id = "mid-1" + cb.data = { + "msgtype": "text", + "text": {"content": "hi"}, + "senderStaffId": "u1", + "conversationType": "1", + "msgId": "m1", + } + ack = await handler.raw_process(cb) + assert ack.code == 200 + assert ack.headers.message_id == "mid-1" + assert ack.data == {"response": "OK"} + channel._on_chatbot_message.assert_called_once() + + _run(go()) + + +# --------------------------------------------------------------------------- +# _normalize_allowed_users tests +# --------------------------------------------------------------------------- + + +class TestNormalizeAllowedUsers: + def test_none_returns_empty(self): + assert _normalize_allowed_users(None) == set() + + def test_empty_list_returns_empty(self): + assert _normalize_allowed_users([]) == set() + + def test_list_of_strings(self): + result = _normalize_allowed_users(["user1", "user2"]) + assert result == {"user1", "user2"} + + def test_single_string(self): + result = _normalize_allowed_users("user1") + assert result == {"user1"} + + def test_numeric_values_converted_to_string(self): + result = _normalize_allowed_users([123, 456]) + assert result == {"123", "456"} + + def test_scalar_treated_as_single_value(self): + result = _normalize_allowed_users(12345) + assert result == {"12345"} + + +# --------------------------------------------------------------------------- +# _normalize_conversation_type tests +# --------------------------------------------------------------------------- + + +class TestNormalizeConversationType: + def test_group_int_or_str(self): + assert _normalize_conversation_type(2) == _CONVERSATION_TYPE_GROUP + assert _normalize_conversation_type("2") == _CONVERSATION_TYPE_GROUP + + def test_p2p_or_none(self): + assert _normalize_conversation_type(1) == 
_CONVERSATION_TYPE_P2P + assert _normalize_conversation_type(None) == _CONVERSATION_TYPE_P2P + + +# --------------------------------------------------------------------------- +# _is_dingtalk_command tests +# --------------------------------------------------------------------------- + + +class TestIsDingTalkCommand: + @pytest.mark.parametrize("command", sorted(KNOWN_CHANNEL_COMMANDS)) + def test_known_commands_recognized(self, command): + assert _is_dingtalk_command(command) is True + + @pytest.mark.parametrize( + "text", + [ + "/unknown", + "/mnt/user-data/outputs/report.md", + "hello", + "", + "not a command", + ], + ) + def test_non_commands_rejected(self, text): + assert _is_dingtalk_command(text) is False + + +# --------------------------------------------------------------------------- +# _extract_text_from_rich_text tests +# --------------------------------------------------------------------------- + + +class TestExtractTextFromRichText: + def test_single_text_item(self): + result = _extract_text_from_rich_text([{"text": "hello"}]) + assert result == "hello" + + def test_multiple_text_items(self): + result = _extract_text_from_rich_text([{"text": "hello"}, {"text": "world"}]) + assert result == "hello world" + + def test_non_text_items_ignored(self): + result = _extract_text_from_rich_text( + [ + {"downloadCode": "abc123"}, + {"text": "caption"}, + ] + ) + assert result == "caption" + + def test_empty_list(self): + assert _extract_text_from_rich_text([]) == "" + + +# --------------------------------------------------------------------------- +# DingTalkChannel._extract_text tests +# --------------------------------------------------------------------------- + + +class TestExtractText: + def test_plain_text(self): + msg = _make_chatbot_message(text="Hello World") + assert DingTalkChannel._extract_text(msg) == "Hello World" + + def test_plain_text_stripped(self): + msg = _make_chatbot_message(text=" Hello ") + assert DingTalkChannel._extract_text(msg) == "Hello" + + def test_rich_text(self): + msg = _make_chatbot_message( + message_type="richText", + rich_text_list=[{"text": "Part 1"}, {"text": "Part 2"}], + ) + assert DingTalkChannel._extract_text(msg) == "Part 1 Part 2" + + def test_unknown_type_returns_empty(self): + msg = _make_chatbot_message(message_type="picture") + assert DingTalkChannel._extract_text(msg) == "" + + +# --------------------------------------------------------------------------- +# DingTalkChannel._on_chatbot_message tests (inbound parsing) +# --------------------------------------------------------------------------- + + +class TestOnChatbotMessage: + def test_p2p_message_produces_correct_inbound(self): + async def go(): + bus = MessageBus() + bus.publish_inbound = AsyncMock() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._main_loop = asyncio.get_event_loop() + channel._running = True + + msg = _make_chatbot_message( + text="hello from dingtalk", + conversation_type=_CONVERSATION_TYPE_P2P, + sender_staff_id="user_001", + message_id="msg_001", + ) + + channel._send_running_reply = AsyncMock() + channel._on_chatbot_message(msg) + + await asyncio.sleep(0.1) + + bus.publish_inbound.assert_awaited_once() + inbound = bus.publish_inbound.await_args.args[0] + assert inbound.channel_name == "dingtalk" + assert inbound.chat_id == "user_001" + assert inbound.user_id == "user_001" + assert inbound.text == "hello from dingtalk" + assert inbound.topic_id is None + assert inbound.metadata["conversation_type"] == 
_CONVERSATION_TYPE_P2P + assert inbound.metadata["sender_staff_id"] == "user_001" + + _run(go()) + + def test_group_message_produces_correct_inbound(self): + async def go(): + bus = MessageBus() + bus.publish_inbound = AsyncMock() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._main_loop = asyncio.get_event_loop() + channel._running = True + + msg = _make_chatbot_message( + text="hello group", + conversation_type=_CONVERSATION_TYPE_GROUP, + sender_staff_id="user_002", + conversation_id="conv_group_001", + message_id="msg_group_001", + ) + + channel._send_running_reply = AsyncMock() + channel._on_chatbot_message(msg) + + await asyncio.sleep(0.1) + + bus.publish_inbound.assert_awaited_once() + inbound = bus.publish_inbound.await_args.args[0] + assert inbound.channel_name == "dingtalk" + assert inbound.chat_id == "conv_group_001" + assert inbound.user_id == "user_002" + assert inbound.text == "hello group" + assert inbound.topic_id == "msg_group_001" + assert inbound.metadata["conversation_type"] == _CONVERSATION_TYPE_GROUP + assert inbound.metadata["conversation_id"] == "conv_group_001" + + _run(go()) + + def test_group_message_integer_conversation_type_normalized(self): + """SDK may deliver conversationType as int 2 — must still route as group.""" + + async def go(): + bus = MessageBus() + bus.publish_inbound = AsyncMock() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._main_loop = asyncio.get_event_loop() + channel._running = True + + msg = _make_chatbot_message( + text="hello group", + conversation_type=2, + sender_staff_id="user_002", + conversation_id="conv_group_001", + message_id="msg_group_002", + ) + + channel._send_running_reply = AsyncMock() + channel._on_chatbot_message(msg) + + await asyncio.sleep(0.1) + + bus.publish_inbound.assert_awaited_once() + inbound = bus.publish_inbound.await_args.args[0] + assert inbound.chat_id == "conv_group_001" + assert inbound.topic_id == "msg_group_002" + assert inbound.metadata["conversation_type"] == _CONVERSATION_TYPE_GROUP + + _run(go()) + + def test_command_classified_correctly(self): + async def go(): + bus = MessageBus() + bus.publish_inbound = AsyncMock() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._main_loop = asyncio.get_event_loop() + channel._running = True + + msg = _make_chatbot_message(text="/help") + channel._send_running_reply = AsyncMock() + channel._on_chatbot_message(msg) + + await asyncio.sleep(0.1) + + bus.publish_inbound.assert_awaited_once() + inbound = bus.publish_inbound.await_args.args[0] + assert inbound.msg_type == InboundMessageType.COMMAND + + _run(go()) + + def test_non_command_classified_as_chat(self): + async def go(): + bus = MessageBus() + bus.publish_inbound = AsyncMock() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._main_loop = asyncio.get_event_loop() + channel._running = True + + msg = _make_chatbot_message(text="just chatting") + channel._send_running_reply = AsyncMock() + channel._on_chatbot_message(msg) + + await asyncio.sleep(0.1) + + bus.publish_inbound.assert_awaited_once() + inbound = bus.publish_inbound.await_args.args[0] + assert inbound.msg_type == InboundMessageType.CHAT + + _run(go()) + + def test_empty_text_ignored(self): + async def go(): + bus = MessageBus() + bus.publish_inbound = AsyncMock() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._main_loop = asyncio.get_event_loop() + 
channel._running = True + + msg = _make_chatbot_message(text=" ") + channel._on_chatbot_message(msg) + + await asyncio.sleep(0.1) + bus.publish_inbound.assert_not_awaited() + + _run(go()) + + +# --------------------------------------------------------------------------- +# allowed_users filtering tests +# --------------------------------------------------------------------------- + + +class TestAllowedUsersFiltering: + def test_allowed_user_passes(self): + async def go(): + bus = MessageBus() + bus.publish_inbound = AsyncMock() + channel = DingTalkChannel(bus, config={"allowed_users": ["user_001"]}) + channel._client_id = "test_key" + channel._main_loop = asyncio.get_event_loop() + channel._running = True + + msg = _make_chatbot_message(sender_staff_id="user_001") + channel._send_running_reply = AsyncMock() + channel._on_chatbot_message(msg) + + await asyncio.sleep(0.1) + bus.publish_inbound.assert_awaited_once() + + _run(go()) + + def test_non_allowed_user_blocked(self): + async def go(): + bus = MessageBus() + bus.publish_inbound = AsyncMock() + channel = DingTalkChannel(bus, config={"allowed_users": ["user_001"]}) + channel._client_id = "test_key" + channel._main_loop = asyncio.get_event_loop() + channel._running = True + + msg = _make_chatbot_message(sender_staff_id="user_blocked") + channel._on_chatbot_message(msg) + + await asyncio.sleep(0.1) + bus.publish_inbound.assert_not_awaited() + + _run(go()) + + def test_empty_allowed_users_allows_all(self): + async def go(): + bus = MessageBus() + bus.publish_inbound = AsyncMock() + channel = DingTalkChannel(bus, config={"allowed_users": []}) + channel._client_id = "test_key" + channel._main_loop = asyncio.get_event_loop() + channel._running = True + + msg = _make_chatbot_message(sender_staff_id="anyone") + channel._send_running_reply = AsyncMock() + channel._on_chatbot_message(msg) + + await asyncio.sleep(0.1) + bus.publish_inbound.assert_awaited_once() + + _run(go()) + + +# --------------------------------------------------------------------------- +# send routing tests (P2P vs Group) +# --------------------------------------------------------------------------- + + +class TestMarkdownFallbackPropagation: + def test_fallback_raises_on_failure(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._cached_token = "tok" + channel._token_expires_at = float("inf") + + channel._send_p2p_message = AsyncMock(side_effect=ConnectionError("send failed")) + + with pytest.raises(ConnectionError, match="send failed"): + await channel._send_markdown_fallback("test_key", _CONVERSATION_TYPE_P2P, "user_001", "", "hello") + + _run(go()) + + +class TestSendRouting: + def test_p2p_send_uses_oto_endpoint(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._client_secret = "test_secret" + + channel._send_p2p_message = AsyncMock() + channel._send_group_message = AsyncMock() + + msg = OutboundMessage( + channel_name="dingtalk", + chat_id="user_001", + thread_id="thread_001", + text="Hello P2P", + metadata={ + "conversation_type": _CONVERSATION_TYPE_P2P, + "sender_staff_id": "user_001", + "conversation_id": "", + }, + ) + + await channel.send(msg) + + channel._send_p2p_message.assert_awaited_once_with("test_key", "user_001", "Hello P2P") + channel._send_group_message.assert_not_awaited() + + _run(go()) + + def test_group_send_uses_group_endpoint(self): + async def go(): + bus = MessageBus() + channel = 
DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._client_secret = "test_secret" + + channel._send_p2p_message = AsyncMock() + channel._send_group_message = AsyncMock() + + msg = OutboundMessage( + channel_name="dingtalk", + chat_id="conv_001", + thread_id="thread_001", + text="Hello Group", + metadata={ + "conversation_type": _CONVERSATION_TYPE_GROUP, + "sender_staff_id": "user_001", + "conversation_id": "conv_001", + }, + ) + + await channel.send(msg) + + channel._send_group_message.assert_awaited_once_with("test_key", "conv_001", "Hello Group", at_user_ids=["user_001"]) + channel._send_p2p_message.assert_not_awaited() + + _run(go()) + + def test_default_metadata_uses_p2p(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._client_secret = "test_secret" + + channel._send_p2p_message = AsyncMock() + channel._send_group_message = AsyncMock() + + msg = OutboundMessage( + channel_name="dingtalk", + chat_id="user_001", + thread_id="thread_001", + text="Hello", + metadata={}, + ) + + await channel.send(msg) + + channel._send_p2p_message.assert_awaited_once() + channel._send_group_message.assert_not_awaited() + + _run(go()) + + +# --------------------------------------------------------------------------- +# send retry tests +# --------------------------------------------------------------------------- + + +class TestSendRetry: + def test_retries_on_failure_then_succeeds(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._client_secret = "test_secret" + + call_count = 0 + + async def flaky_send(robot_code, user_id, text): + nonlocal call_count + call_count += 1 + if call_count < 3: + raise ConnectionError("network error") + + channel._send_p2p_message = AsyncMock(side_effect=flaky_send) + + msg = OutboundMessage( + channel_name="dingtalk", + chat_id="user_001", + thread_id="thread_001", + text="hello", + metadata={"conversation_type": _CONVERSATION_TYPE_P2P, "sender_staff_id": "user_001"}, + ) + + await channel.send(msg) + assert call_count == 3 + + _run(go()) + + def test_raises_after_all_retries_exhausted(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._client_secret = "test_secret" + + channel._send_p2p_message = AsyncMock(side_effect=ConnectionError("fail")) + + msg = OutboundMessage( + channel_name="dingtalk", + chat_id="user_001", + thread_id="thread_001", + text="hello", + metadata={"conversation_type": _CONVERSATION_TYPE_P2P, "sender_staff_id": "user_001"}, + ) + + with pytest.raises(ConnectionError): + await channel.send(msg) + + assert channel._send_p2p_message.await_count == 3 + + _run(go()) + + def test_raises_runtime_error_when_no_attempts_configured(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._client_secret = "test_secret" + + msg = OutboundMessage( + channel_name="dingtalk", + chat_id="user_001", + thread_id="thread_001", + text="hello", + metadata={"conversation_type": _CONVERSATION_TYPE_P2P, "sender_staff_id": "user_001"}, + ) + + with pytest.raises(RuntimeError, match="without an exception"): + await channel.send(msg, _max_retries=0) + + _run(go()) + + +# --------------------------------------------------------------------------- +# topic_id mapping tests +# 
--------------------------------------------------------------------------- + + +class TestTopicIdMapping: + def test_p2p_topic_is_none(self): + async def go(): + bus = MessageBus() + bus.publish_inbound = AsyncMock() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._main_loop = asyncio.get_event_loop() + channel._running = True + + msg = _make_chatbot_message( + conversation_type=_CONVERSATION_TYPE_P2P, + message_id="msg_p2p_001", + ) + channel._send_running_reply = AsyncMock() + channel._on_chatbot_message(msg) + + await asyncio.sleep(0.1) + inbound = bus.publish_inbound.await_args.args[0] + assert inbound.topic_id is None + + _run(go()) + + def test_group_topic_is_message_id(self): + async def go(): + bus = MessageBus() + bus.publish_inbound = AsyncMock() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._main_loop = asyncio.get_event_loop() + channel._running = True + + msg = _make_chatbot_message( + conversation_type=_CONVERSATION_TYPE_GROUP, + message_id="msg_group_001", + conversation_id="conv_001", + ) + channel._send_running_reply = AsyncMock() + channel._on_chatbot_message(msg) + + await asyncio.sleep(0.1) + inbound = bus.publish_inbound.await_args.args[0] + assert inbound.topic_id == "msg_group_001" + + _run(go()) + + +# --------------------------------------------------------------------------- +# Token caching tests +# --------------------------------------------------------------------------- + + +class TestAccessTokenValidation: + def test_rejects_non_dict_response(self): + async def go(): + from unittest.mock import patch + + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "k" + channel._client_secret = "s" + + class FakeResponse: + def raise_for_status(self): + pass + + def json(self): + return "not a dict" + + class FakeClient: + async def __aenter__(self): + return self + + async def __aexit__(self, *a): + pass + + async def post(self, url, **kwargs): + return FakeResponse() + + with patch("app.channels.dingtalk.httpx.AsyncClient", return_value=FakeClient()): + with pytest.raises(ValueError, match="JSON object"): + await channel._get_access_token() + + _run(go()) + + def test_rejects_empty_access_token(self): + async def go(): + from unittest.mock import patch + + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "k" + channel._client_secret = "s" + + class FakeResponse: + def raise_for_status(self): + pass + + def json(self): + return {"accessToken": "", "expireIn": 7200} + + class FakeClient: + async def __aenter__(self): + return self + + async def __aexit__(self, *a): + pass + + async def post(self, url, **kwargs): + return FakeResponse() + + with patch("app.channels.dingtalk.httpx.AsyncClient", return_value=FakeClient()): + with pytest.raises(ValueError, match="usable accessToken"): + await channel._get_access_token() + + _run(go()) + + def test_invalid_expire_in_uses_default(self): + async def go(): + import time + from unittest.mock import patch + + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "k" + channel._client_secret = "s" + + class FakeResponse: + def raise_for_status(self): + pass + + def json(self): + return {"accessToken": "tok_ok", "expireIn": "invalid"} + + class FakeClient: + async def __aenter__(self): + return self + + async def __aexit__(self, *a): + pass + + async def post(self, url, **kwargs): + return FakeResponse() + + before = time.monotonic() + with 
patch("app.channels.dingtalk.httpx.AsyncClient", return_value=FakeClient()): + token = await channel._get_access_token() + + assert token == "tok_ok" + assert channel._token_expires_at > before + + _run(go()) + + +class TestTokenCaching: + def test_token_is_cached_across_calls(self): + async def go(): + from unittest.mock import patch + + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._client_secret = "test_secret" + + call_count = 0 + + class FakeResponse: + def raise_for_status(self): + pass + + def json(self): + return {"accessToken": "tok_abc", "expireIn": 7200} + + class FakeClient: + async def __aenter__(self): + return self + + async def __aexit__(self, *a): + pass + + async def post(self, url, **kwargs): + nonlocal call_count + call_count += 1 + return FakeResponse() + + with patch("app.channels.dingtalk.httpx.AsyncClient", return_value=FakeClient()): + t1 = await channel._get_access_token() + t2 = await channel._get_access_token() + + assert t1 == "tok_abc" + assert t2 == "tok_abc" + assert call_count == 1 + + _run(go()) + + +# --------------------------------------------------------------------------- +# Group message @ mention format tests +# --------------------------------------------------------------------------- + + +class TestGroupMessageMarkdownFormat: + def test_at_user_ids_still_use_markdown(self): + """groupMessages/send uses sampleMarkdown; @{userId} in body returns 400 so at_user_ids is ignored.""" + + async def go(): + from unittest.mock import patch + + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._client_secret = "test_secret" + channel._cached_token = "tok_test" + channel._token_expires_at = float("inf") + + captured_json: list[dict] = [] + + class FakeResponse: + def raise_for_status(self): + pass + + def json(self): + return {"processQueryKey": "ok"} + + class FakeClient: + async def __aenter__(self): + return self + + async def __aexit__(self, *a): + pass + + async def post(self, url, **kwargs): + captured_json.append(kwargs.get("json", {})) + return FakeResponse() + + with patch("app.channels.dingtalk.httpx.AsyncClient", return_value=FakeClient()): + await channel._send_group_message("bot", "conv1", "hello", at_user_ids=["staff_001"]) + + assert len(captured_json) == 1 + payload = captured_json[0] + assert payload["msgKey"] == "sampleMarkdown" + import json + + param = json.loads(payload["msgParam"]) + assert param["text"] == "hello" + assert "@" not in json.dumps(param) + + _run(go()) + + def test_no_at_user_ids_uses_markdown(self): + async def go(): + from unittest.mock import patch + + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "test_key" + channel._client_secret = "test_secret" + channel._cached_token = "tok_test" + channel._token_expires_at = float("inf") + + captured_json: list[dict] = [] + + class FakeResponse: + def raise_for_status(self): + pass + + def json(self): + return {"processQueryKey": "ok"} + + class FakeClient: + async def __aenter__(self): + return self + + async def __aexit__(self, *a): + pass + + async def post(self, url, **kwargs): + captured_json.append(kwargs.get("json", {})) + return FakeResponse() + + with patch("app.channels.dingtalk.httpx.AsyncClient", return_value=FakeClient()): + await channel._send_group_message("bot", "conv1", "hello") + + assert len(captured_json) == 1 + payload = captured_json[0] + assert payload["msgKey"] == "sampleMarkdown" + + _run(go()) + + 
+class TestAdaptMarkdownForDingtalk: + def test_fenced_code_block_to_blockquote(self): + text = "Hello\n```python\ndef foo():\n return 1\n```\nDone" + result = _adapt_markdown_for_dingtalk(text) + assert "```" not in result + assert "> **python**" in result + assert "> def foo():" in result + assert "> return 1" in result + + def test_fenced_code_block_no_language(self): + text = "```\nplain code\n```" + result = _adapt_markdown_for_dingtalk(text) + assert "```" not in result + assert "> plain code" in result + + def test_inline_code_to_bold(self): + text = "Use `pip install` to install" + result = _adapt_markdown_for_dingtalk(text) + assert result == "Use **pip install** to install" + + def test_horizontal_rule_to_unicode(self): + text = "Above\n---\nBelow" + result = _adapt_markdown_for_dingtalk(text) + assert "───────────" in result + assert "---" not in result + + def test_supported_markdown_preserved(self): + text = "# Title\n**bold** and *italic*\n- list item\n> quote\n[link](http://example.com)" + result = _adapt_markdown_for_dingtalk(text) + assert result == text + + def test_plain_text_unchanged(self): + text = "Hello world, no markdown here." + assert _adapt_markdown_for_dingtalk(text) == text + + def test_combined_elements(self): + text = "# Report\n\nRun `make test` then:\n\n```bash\npytest -v\n```\n\n---\n\nDone." + result = _adapt_markdown_for_dingtalk(text) + assert "# Report" in result + assert "**make test**" in result + assert "> **bash**" in result + assert "> pytest -v" in result + assert "───────────" in result + assert "Done." in result + + +class TestConvertMarkdownTable: + def test_simple_table(self): + text = "| Name | Age |\n|------|-----|\n| Alice | 30 |\n| Bob | 25 |" + result = _convert_markdown_table(text) + assert "> **Name**: Alice" in result + assert "> **Age**: 30" in result + assert "> **Name**: Bob" in result + assert "> **Age**: 25" in result + assert "|" not in result + + def test_table_with_surrounding_text(self): + text = "Results:\n\n| Key | Value |\n|-----|-------|\n| a | 1 |\n\nEnd." + result = _convert_markdown_table(text) + assert "Results:" in result + assert "> **Key**: a" in result + assert "> **Value**: 1" in result + assert "End." 
in result + + def test_no_table(self): + text = "Just plain text\nwith lines" + assert _convert_markdown_table(text) == text + + def test_alignment_separators(self): + text = "| Left | Center | Right |\n|:-----|:------:|------:|\n| a | b | c |" + result = _convert_markdown_table(text) + assert "> **Left**: a" in result + assert "> **Center**: b" in result + assert "> **Right**: c" in result + + +class TestUploadMediaValidation: + def test_non_dict_response_returns_none(self): + async def go(): + from unittest.mock import patch + + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "k" + channel._client_secret = "s" + channel._cached_token = "tok" + channel._token_expires_at = float("inf") + + class FakeResponse: + def raise_for_status(self): + pass + + def json(self): + return ["not", "a", "dict"] + + class FakeClient: + async def __aenter__(self): + return self + + async def __aexit__(self, *a): + pass + + async def post(self, url, **kwargs): + return FakeResponse() + + with patch("app.channels.dingtalk.httpx.AsyncClient", return_value=FakeClient()): + result = await channel._upload_media("/tmp/test.png", "image") + + assert result is None + + _run(go()) + + def test_json_decode_error_returns_none(self): + async def go(): + import json as json_mod + from unittest.mock import patch + + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + channel._client_id = "k" + channel._client_secret = "s" + channel._cached_token = "tok" + channel._token_expires_at = float("inf") + + class FakeResponse: + def raise_for_status(self): + pass + + def json(self): + raise json_mod.JSONDecodeError("err", "", 0) + + class FakeClient: + async def __aenter__(self): + return self + + async def __aexit__(self, *a): + pass + + async def post(self, url, **kwargs): + return FakeResponse() + + with patch("app.channels.dingtalk.httpx.AsyncClient", return_value=FakeClient()): + result = await channel._upload_media("/tmp/test.png", "image") + + assert result is None + + _run(go()) + + +class TestChannelRegistration: + def test_dingtalk_in_channel_registry(self): + from app.channels.service import _CHANNEL_REGISTRY + + assert "dingtalk" in _CHANNEL_REGISTRY + assert _CHANNEL_REGISTRY["dingtalk"] == "app.channels.dingtalk:DingTalkChannel" + + def test_dingtalk_in_credential_keys(self): + from app.channels.service import _CHANNEL_CREDENTIAL_KEYS + + assert "dingtalk" in _CHANNEL_CREDENTIAL_KEYS + assert "client_id" in _CHANNEL_CREDENTIAL_KEYS["dingtalk"] + assert "client_secret" in _CHANNEL_CREDENTIAL_KEYS["dingtalk"] + + def test_dingtalk_in_channel_capabilities(self): + from app.channels.manager import CHANNEL_CAPABILITIES + + assert "dingtalk" in CHANNEL_CAPABILITIES + assert CHANNEL_CAPABILITIES["dingtalk"]["supports_streaming"] is False + + +# --------------------------------------------------------------------------- +# AI Card streaming mode tests +# --------------------------------------------------------------------------- + + +class TestCardMode: + def test_card_mode_enabled_supports_streaming(self): + bus = MessageBus() + channel = DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + assert channel.supports_streaming is True + + def test_non_card_mode_no_streaming(self): + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + assert channel.supports_streaming is False + + def test_non_card_mode_unchanged(self): + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + assert channel._card_template_id == "" + assert 
channel._card_track_ids == {} + assert channel._card_repliers == {} + assert channel._incoming_messages == {} + assert channel._dingtalk_client is None + + def test_card_source_key_matches_inbound_using_message_id_metadata(self): + """Outbound correlation must match inbound ``message_id`` even if ``thread_ts`` drifts.""" + + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + inbound = channel._make_inbound( + chat_id="x", + user_id="u", + text="hi", + thread_ts="ts_fallback", + metadata={ + "conversation_type": _CONVERSATION_TYPE_P2P, + "sender_staff_id": "user_001", + "conversation_id": "", + "message_id": "msg_real", + }, + ) + out = OutboundMessage( + channel_name="dingtalk", + chat_id="x", + thread_id="t", + text="ok", + thread_ts="wrong_ts", + metadata=dict(inbound.metadata), + ) + assert channel._make_card_source_key(inbound) == channel._make_card_source_key_from_outbound(out) + + _run(go()) + + def test_running_reply_creates_card(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + channel._client_id = "test_key" + + channel._create_and_deliver_card = AsyncMock(return_value="track_001") + + inbound = channel._make_inbound( + chat_id="user_001", + user_id="user_001", + text="hello", + metadata={ + "conversation_type": _CONVERSATION_TYPE_P2P, + "sender_staff_id": "user_001", + "conversation_id": "", + "message_id": "msg_001", + }, + ) + + mock_chatbot_msg = MagicMock() + source_key = channel._make_card_source_key(inbound) + channel._incoming_messages[source_key] = mock_chatbot_msg + + await channel._send_running_reply("user_001", inbound) + + channel._create_and_deliver_card.assert_awaited_once_with( + "\u23f3 Working on it...", + chatbot_message=mock_chatbot_msg, + ) + assert channel._card_track_ids[source_key] == "track_001" + + _run(go()) + + def test_send_streams_to_card(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + channel._client_id = "test_key" + + channel._stream_update_card = AsyncMock() + + # Pre-populate card tracking + source_key = f"{_CONVERSATION_TYPE_P2P}:user_001::msg_001" + channel._card_track_ids[source_key] = "track_001" + + msg = OutboundMessage( + channel_name="dingtalk", + chat_id="user_001", + thread_id="thread_001", + text="Partial response...", + is_final=False, + thread_ts="msg_001", + metadata={ + "conversation_type": _CONVERSATION_TYPE_P2P, + "sender_staff_id": "user_001", + "conversation_id": "", + }, + ) + + await channel.send(msg) + + channel._stream_update_card.assert_awaited_once_with( + "track_001", + "Partial response...", + is_finalize=False, + ) + # Track ID should still exist (not final) + assert source_key in channel._card_track_ids + + _run(go()) + + def test_send_finalizes_card(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + channel._client_id = "test_key" + + channel._stream_update_card = AsyncMock() + + source_key = f"{_CONVERSATION_TYPE_P2P}:user_001::msg_001" + channel._card_track_ids[source_key] = "track_001" + + msg = OutboundMessage( + channel_name="dingtalk", + chat_id="user_001", + thread_id="thread_001", + text="Final answer.", + is_final=True, + thread_ts="msg_001", + metadata={ + "conversation_type": _CONVERSATION_TYPE_P2P, + "sender_staff_id": "user_001", + "conversation_id": "", + }, + ) + + await channel.send(msg) + + channel._stream_update_card.assert_awaited_once_with( + "track_001", 
+ "Final answer.", + is_finalize=True, + ) + # Track ID should be cleaned up after final + assert source_key not in channel._card_track_ids + + _run(go()) + + def test_card_mode_skips_markdown_adaptation(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + channel._client_id = "test_key" + + raw_markdown = "```python\ndef foo():\n pass\n```" + captured_content: list[str] = [] + + async def capture_stream(out_track_id, content, *, is_finalize=False, is_error=False): + captured_content.append(content) + + channel._stream_update_card = AsyncMock(side_effect=capture_stream) + + source_key = f"{_CONVERSATION_TYPE_P2P}:user_001::msg_001" + channel._card_track_ids[source_key] = "track_001" + + msg = OutboundMessage( + channel_name="dingtalk", + chat_id="user_001", + thread_id="thread_001", + text=raw_markdown, + is_final=True, + thread_ts="msg_001", + metadata={ + "conversation_type": _CONVERSATION_TYPE_P2P, + "sender_staff_id": "user_001", + "conversation_id": "", + }, + ) + + await channel.send(msg) + + # Raw markdown should be passed through without adaptation + assert captured_content[0] == raw_markdown + + _run(go()) + + def test_card_fallback_on_creation_failure(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + channel._client_id = "test_key" + + # Card creation returns None (failure) + channel._create_and_deliver_card = AsyncMock(return_value=None) + channel._send_text_message_to_user = AsyncMock() + + inbound = channel._make_inbound( + chat_id="user_001", + user_id="user_001", + text="hello", + metadata={ + "conversation_type": _CONVERSATION_TYPE_P2P, + "sender_staff_id": "user_001", + "conversation_id": "", + "message_id": "msg_001", + }, + ) + + source_key = channel._make_card_source_key(inbound) + channel._incoming_messages[source_key] = MagicMock() + + await channel._send_running_reply("user_001", inbound) + + # Should fall through to text message + channel._send_text_message_to_user.assert_awaited_once() + assert len(channel._card_track_ids) == 0 + + _run(go()) + + def test_send_skips_non_final_without_card_track_when_template_configured(self): + """Without a live card track, Manager streaming would duplicate sampleMarkdown sends.""" + + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + channel._client_id = "test_key" + channel._send_group_message = AsyncMock() + channel._send_p2p_message = AsyncMock() + + meta = { + "conversation_type": _CONVERSATION_TYPE_P2P, + "sender_staff_id": "user_001", + "conversation_id": "", + } + await channel.send( + OutboundMessage( + channel_name="dingtalk", + chat_id="user_001", + thread_id="t1", + text="partial", + is_final=False, + thread_ts="msg_001", + metadata=meta, + ) + ) + channel._send_p2p_message.assert_not_called() + channel._send_group_message.assert_not_called() + + await channel.send( + OutboundMessage( + channel_name="dingtalk", + chat_id="user_001", + thread_id="t1", + text="final answer", + is_final=True, + thread_ts="msg_001", + metadata=meta, + ) + ) + channel._send_p2p_message.assert_awaited_once() + + _run(go()) + + def test_card_fallback_on_stream_failure(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + channel._client_id = "test_key" + + channel._stream_update_card = AsyncMock(side_effect=ConnectionError("stream failed")) + channel._send_markdown_fallback = 
AsyncMock() + + source_key = f"{_CONVERSATION_TYPE_P2P}:user_001::msg_001" + channel._card_track_ids[source_key] = "track_001" + + msg = OutboundMessage( + channel_name="dingtalk", + chat_id="user_001", + thread_id="thread_001", + text="Final answer.", + is_final=True, + thread_ts="msg_001", + metadata={ + "conversation_type": _CONVERSATION_TYPE_P2P, + "sender_staff_id": "user_001", + "conversation_id": "", + }, + ) + + await channel.send(msg) + + # Should fallback to markdown + channel._send_markdown_fallback.assert_awaited_once_with( + "test_key", + _CONVERSATION_TYPE_P2P, + "user_001", + "", + "Final answer.", + ) + # Track ID should be cleaned up + assert source_key not in channel._card_track_ids + + _run(go()) + + def test_pre_start_stores_dingtalk_client(self): + bus = MessageBus() + channel = DingTalkChannel(bus, config={}) + handler = _DingTalkMessageHandler(channel) + + mock_client = MagicMock() + handler.dingtalk_client = mock_client + handler.pre_start() + + assert channel._dingtalk_client is mock_client + + def test_chatbot_message_stored_for_card_mode(self): + bus = MessageBus() + channel = DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + + mock_message = MagicMock() + mock_message.sender_staff_id = "user_001" + mock_message.conversation_type = "1" + mock_message.conversation_id = "" + mock_message.message_id = "msg_001" + mock_message.sender_nick = "TestUser" + mock_message.message_type = "text" + mock_message.text = MagicMock(content="hello") + mock_message.rich_text_content = None + + channel._main_loop = MagicMock() + channel._main_loop.is_running.return_value = False + channel._allowed_users = set() + channel._running = True + + channel._on_chatbot_message(mock_message) + + assert len(channel._incoming_messages) == 1 + stored_msg = list(channel._incoming_messages.values())[0] + assert stored_msg is mock_message + + def test_card_replier_cleanup_on_final(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + channel._client_id = "test_key" + + channel._stream_update_card = AsyncMock() + + source_key = f"{_CONVERSATION_TYPE_P2P}:user_001::msg_001" + channel._card_track_ids[source_key] = "track_001" + channel._card_repliers["track_001"] = MagicMock() + + msg = OutboundMessage( + channel_name="dingtalk", + chat_id="user_001", + thread_id="thread_001", + text="Final answer.", + is_final=True, + thread_ts="msg_001", + metadata={ + "conversation_type": _CONVERSATION_TYPE_P2P, + "sender_staff_id": "user_001", + "conversation_id": "", + }, + ) + + await channel.send(msg) + + assert source_key not in channel._card_track_ids + assert "track_001" not in channel._card_repliers + + _run(go()) + + def test_card_creation_without_sdk_client_returns_none(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + channel._dingtalk_client = None + + result = await channel._create_and_deliver_card( + "test", + chatbot_message=MagicMock(), + ) + assert result is None + + _run(go()) + + def test_card_creation_without_chatbot_message_returns_none(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + channel._dingtalk_client = MagicMock() + + result = await channel._create_and_deliver_card( + "test", + chatbot_message=None, + ) + assert result is None + + _run(go()) + + def test_stream_update_card_raises_without_replier(self): + async def go(): + bus = MessageBus() + channel = 
DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + + with pytest.raises(RuntimeError, match="No AICardReplier found"): + await channel._stream_update_card("nonexistent_track", "content") + + _run(go()) + + def test_stop_clears_card_state(self): + async def go(): + bus = MessageBus() + channel = DingTalkChannel(bus, config={"card_template_id": "tpl_123"}) + channel._running = True + channel._dingtalk_client = MagicMock() + channel._incoming_messages["key"] = MagicMock() + channel._card_repliers["track"] = MagicMock() + channel._card_track_ids["source"] = "track" + + await channel.stop() + + assert channel._dingtalk_client is None + assert channel._incoming_messages == {} + assert channel._card_repliers == {} + assert channel._card_track_ids == {} + + _run(go()) diff --git a/backend/tests/test_ensure_admin.py b/backend/tests/test_ensure_admin.py new file mode 100644 index 000000000..9930b047f --- /dev/null +++ b/backend/tests/test_ensure_admin.py @@ -0,0 +1,296 @@ +"""Tests for _ensure_admin_user() in app.py. + +Covers: first-boot no-op (admin creation removed), orphan migration +when admin exists, no-op on no admin found, and edge cases. +""" + +import asyncio +import os +from types import SimpleNamespace +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +os.environ.setdefault("AUTH_JWT_SECRET", "test-secret-key-ensure-admin-testing-min-32") + +from app.gateway.auth.config import AuthConfig, set_auth_config + +_JWT_SECRET = "test-secret-key-ensure-admin-testing-min-32" + + +@pytest.fixture(autouse=True) +def _setup_auth_config(): + set_auth_config(AuthConfig(jwt_secret=_JWT_SECRET)) + yield + set_auth_config(AuthConfig(jwt_secret=_JWT_SECRET)) + + +def _make_app_stub(store=None): + """Minimal app-like object with state.store.""" + app = SimpleNamespace() + app.state = SimpleNamespace() + app.state.store = store + return app + + +def _make_provider(admin_count=0): + p = AsyncMock() + p.count_users = AsyncMock(return_value=admin_count) + p.count_admin_users = AsyncMock(return_value=admin_count) + p.create_user = AsyncMock() + p.update_user = AsyncMock(side_effect=lambda u: u) + return p + + +def _make_session_factory(admin_row=None): + """Build a mock async session factory that returns a row from execute().""" + row_result = MagicMock() + row_result.scalar_one_or_none.return_value = admin_row + + execute_result = MagicMock() + execute_result.scalar_one_or_none.return_value = admin_row + + session = AsyncMock() + session.execute = AsyncMock(return_value=execute_result) + + # Async context manager + session_cm = AsyncMock() + session_cm.__aenter__ = AsyncMock(return_value=session) + session_cm.__aexit__ = AsyncMock(return_value=False) + + sf = MagicMock() + sf.return_value = session_cm + return sf + + +# ── First boot: no admin → return early ────────────────────────────────── + + +def test_first_boot_does_not_create_admin(): + """admin_count==0 → do NOT create admin automatically.""" + provider = _make_provider(admin_count=0) + app = _make_app_stub() + + with patch("app.gateway.deps.get_local_provider", return_value=provider): + from app.gateway.app import _ensure_admin_user + + asyncio.run(_ensure_admin_user(app)) + + provider.create_user.assert_not_called() + + +def test_first_boot_skips_migration(): + """No admin → return early before any migration attempt.""" + provider = _make_provider(admin_count=0) + store = AsyncMock() + store.asearch = AsyncMock(return_value=[]) + app = _make_app_stub(store=store) + + with 
patch("app.gateway.deps.get_local_provider", return_value=provider): + from app.gateway.app import _ensure_admin_user + + asyncio.run(_ensure_admin_user(app)) + + store.asearch.assert_not_called() + + +# ── Admin exists: migration runs when admin row found ──────────────────── + + +def test_admin_exists_triggers_migration(): + """Admin exists and admin row found → _migrate_orphaned_threads called.""" + from uuid import uuid4 + + admin_row = MagicMock() + admin_row.id = uuid4() + + provider = _make_provider(admin_count=1) + sf = _make_session_factory(admin_row=admin_row) + store = AsyncMock() + store.asearch = AsyncMock(return_value=[]) + app = _make_app_stub(store=store) + + with patch("app.gateway.deps.get_local_provider", return_value=provider): + with patch("deerflow.persistence.engine.get_session_factory", return_value=sf): + from app.gateway.app import _ensure_admin_user + + asyncio.run(_ensure_admin_user(app)) + + store.asearch.assert_called_once() + + +def test_admin_exists_no_admin_row_skips_migration(): + """Admin count > 0 but DB row missing (edge case) → skip migration gracefully.""" + provider = _make_provider(admin_count=2) + sf = _make_session_factory(admin_row=None) + store = AsyncMock() + app = _make_app_stub(store=store) + + with patch("app.gateway.deps.get_local_provider", return_value=provider): + with patch("deerflow.persistence.engine.get_session_factory", return_value=sf): + from app.gateway.app import _ensure_admin_user + + asyncio.run(_ensure_admin_user(app)) + + store.asearch.assert_not_called() + + +def test_admin_exists_no_store_skips_migration(): + """Admin exists, row found, but no store → no crash, no migration.""" + from uuid import uuid4 + + admin_row = MagicMock() + admin_row.id = uuid4() + + provider = _make_provider(admin_count=1) + sf = _make_session_factory(admin_row=admin_row) + app = _make_app_stub(store=None) + + with patch("app.gateway.deps.get_local_provider", return_value=provider): + with patch("deerflow.persistence.engine.get_session_factory", return_value=sf): + from app.gateway.app import _ensure_admin_user + + asyncio.run(_ensure_admin_user(app)) + + # No assertion needed — just verify no crash + + +def test_admin_exists_session_factory_none_skips_migration(): + """get_session_factory() returns None → return early, no crash.""" + provider = _make_provider(admin_count=1) + store = AsyncMock() + app = _make_app_stub(store=store) + + with patch("app.gateway.deps.get_local_provider", return_value=provider): + with patch("deerflow.persistence.engine.get_session_factory", return_value=None): + from app.gateway.app import _ensure_admin_user + + asyncio.run(_ensure_admin_user(app)) + + store.asearch.assert_not_called() + + +def test_migration_failure_is_non_fatal(): + """_migrate_orphaned_threads exception is caught and logged.""" + from uuid import uuid4 + + admin_row = MagicMock() + admin_row.id = uuid4() + + provider = _make_provider(admin_count=1) + sf = _make_session_factory(admin_row=admin_row) + store = AsyncMock() + store.asearch = AsyncMock(side_effect=RuntimeError("store crashed")) + app = _make_app_stub(store=store) + + with patch("app.gateway.deps.get_local_provider", return_value=provider): + with patch("deerflow.persistence.engine.get_session_factory", return_value=sf): + from app.gateway.app import _ensure_admin_user + + # Should not raise + asyncio.run(_ensure_admin_user(app)) + + +# ── Section 5.1-5.6 upgrade path: orphan thread migration ──────────────── + + +def test_migrate_orphaned_threads_stamps_user_id_on_unowned_rows(): + 
"""First boot finds Store-only legacy threads → stamps admin's id. + + Validates the **TC-UPG-02 upgrade story**: an operator running main + (no auth) accumulates threads in the LangGraph Store namespace + ``("threads",)`` with no ``metadata.user_id``. After upgrading to + feat/auth-on-2.0-rc, the first ``_ensure_admin_user`` boot should + rewrite each unowned item with the freshly created admin's id. + """ + from app.gateway.app import _migrate_orphaned_threads + + # Three orphan items + one already-owned item that should be left alone. + items = [ + SimpleNamespace(key="t1", value={"metadata": {"title": "old-thread-1"}}), + SimpleNamespace(key="t2", value={"metadata": {"title": "old-thread-2"}}), + SimpleNamespace(key="t3", value={"metadata": {}}), + SimpleNamespace(key="t4", value={"metadata": {"user_id": "someone-else", "title": "preserved"}}), + ] + store = AsyncMock() + # asearch returns the entire batch on first call, then an empty page + # to terminate _iter_store_items. + store.asearch = AsyncMock(side_effect=[items, []]) + aput_calls: list[tuple[tuple, str, dict]] = [] + + async def _record_aput(namespace, key, value): + aput_calls.append((namespace, key, value)) + + store.aput = AsyncMock(side_effect=_record_aput) + + migrated = asyncio.run(_migrate_orphaned_threads(store, "admin-id-42")) + + # Three orphan rows migrated, one preserved. + assert migrated == 3 + assert len(aput_calls) == 3 + rewritten_keys = {call[1] for call in aput_calls} + assert rewritten_keys == {"t1", "t2", "t3"} + # Each rewrite carries the new user_id; titles preserved where present. + by_key = {call[1]: call[2] for call in aput_calls} + assert by_key["t1"]["metadata"]["user_id"] == "admin-id-42" + assert by_key["t1"]["metadata"]["title"] == "old-thread-1" + assert by_key["t3"]["metadata"]["user_id"] == "admin-id-42" + # The pre-owned item must NOT have been rewritten. + assert "t4" not in rewritten_keys + + +def test_migrate_orphaned_threads_empty_store_is_noop(): + """A store with no threads → migrated == 0, no aput calls.""" + from app.gateway.app import _migrate_orphaned_threads + + store = AsyncMock() + store.asearch = AsyncMock(return_value=[]) + store.aput = AsyncMock() + + migrated = asyncio.run(_migrate_orphaned_threads(store, "admin-id-42")) + + assert migrated == 0 + store.aput.assert_not_called() + + +def test_iter_store_items_walks_multiple_pages(): + """Cursor-style iterator pulls every page until a short page terminates. + + Closes the regression where the old hardcoded ``limit=1000`` could + silently drop orphans on a large pre-upgrade dataset. The migration + code path uses the default ``page_size=500``; this test pins the + iterator with ``page_size=2`` so it stays fast. 
+ """ + from app.gateway.app import _iter_store_items + + page_a = [SimpleNamespace(key=f"t{i}", value={"metadata": {}}) for i in range(2)] + page_b = [SimpleNamespace(key=f"t{i + 2}", value={"metadata": {}}) for i in range(2)] + page_c: list = [] # short page → loop terminates + + store = AsyncMock() + store.asearch = AsyncMock(side_effect=[page_a, page_b, page_c]) + + async def _collect(): + return [item.key async for item in _iter_store_items(store, ("threads",), page_size=2)] + + keys = asyncio.run(_collect()) + assert keys == ["t0", "t1", "t2", "t3"] + # Three asearch calls: full batch, full batch, empty terminator + assert store.asearch.await_count == 3 + + +def test_iter_store_items_terminates_on_short_page(): + """A short page (len < page_size) ends the loop without an extra call.""" + from app.gateway.app import _iter_store_items + + page = [SimpleNamespace(key=f"t{i}", value={}) for i in range(3)] + store = AsyncMock() + store.asearch = AsyncMock(return_value=page) + + async def _collect(): + return [item.key async for item in _iter_store_items(store, ("threads",), page_size=10)] + + keys = asyncio.run(_collect()) + assert keys == ["t0", "t1", "t2"] + # Only one call — no terminator probe needed because len(batch) < page_size + assert store.asearch.await_count == 1 diff --git a/backend/tests/test_feedback.py b/backend/tests/test_feedback.py new file mode 100644 index 000000000..a592bdd22 --- /dev/null +++ b/backend/tests/test_feedback.py @@ -0,0 +1,289 @@ +"""Tests for FeedbackRepository and follow-up association. + +Uses temp SQLite DB for ORM tests. +""" + +import pytest + +from deerflow.persistence.feedback import FeedbackRepository + + +async def _make_feedback_repo(tmp_path): + from deerflow.persistence.engine import get_session_factory, init_engine + + url = f"sqlite+aiosqlite:///{tmp_path / 'test.db'}" + await init_engine("sqlite", url=url, sqlite_dir=str(tmp_path)) + return FeedbackRepository(get_session_factory()) + + +async def _cleanup(): + from deerflow.persistence.engine import close_engine + + await close_engine() + + +# -- FeedbackRepository -- + + +class TestFeedbackRepository: + @pytest.mark.anyio + async def test_create_positive(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + record = await repo.create(run_id="r1", thread_id="t1", rating=1) + assert record["feedback_id"] + assert record["rating"] == 1 + assert record["run_id"] == "r1" + assert record["thread_id"] == "t1" + assert "created_at" in record + await _cleanup() + + @pytest.mark.anyio + async def test_create_negative_with_comment(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + record = await repo.create( + run_id="r1", + thread_id="t1", + rating=-1, + comment="Response was inaccurate", + ) + assert record["rating"] == -1 + assert record["comment"] == "Response was inaccurate" + await _cleanup() + + @pytest.mark.anyio + async def test_create_with_message_id(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + record = await repo.create(run_id="r1", thread_id="t1", rating=1, message_id="msg-42") + assert record["message_id"] == "msg-42" + await _cleanup() + + @pytest.mark.anyio + async def test_create_with_owner(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + record = await repo.create(run_id="r1", thread_id="t1", rating=1, user_id="user-1") + assert record["user_id"] == "user-1" + await _cleanup() + + @pytest.mark.anyio + async def test_create_invalid_rating_zero(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + with 
pytest.raises(ValueError): + await repo.create(run_id="r1", thread_id="t1", rating=0) + await _cleanup() + + @pytest.mark.anyio + async def test_create_invalid_rating_five(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + with pytest.raises(ValueError): + await repo.create(run_id="r1", thread_id="t1", rating=5) + await _cleanup() + + @pytest.mark.anyio + async def test_get(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + created = await repo.create(run_id="r1", thread_id="t1", rating=1) + fetched = await repo.get(created["feedback_id"]) + assert fetched is not None + assert fetched["feedback_id"] == created["feedback_id"] + assert fetched["rating"] == 1 + await _cleanup() + + @pytest.mark.anyio + async def test_get_nonexistent(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + assert await repo.get("nonexistent") is None + await _cleanup() + + @pytest.mark.anyio + async def test_list_by_run(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + await repo.create(run_id="r1", thread_id="t1", rating=1, user_id="user-1") + await repo.create(run_id="r1", thread_id="t1", rating=-1, user_id="user-2") + await repo.create(run_id="r2", thread_id="t1", rating=1, user_id="user-1") + results = await repo.list_by_run("t1", "r1", user_id=None) + assert len(results) == 2 + assert all(r["run_id"] == "r1" for r in results) + await _cleanup() + + @pytest.mark.anyio + async def test_list_by_thread(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + await repo.create(run_id="r1", thread_id="t1", rating=1) + await repo.create(run_id="r2", thread_id="t1", rating=-1) + await repo.create(run_id="r3", thread_id="t2", rating=1) + results = await repo.list_by_thread("t1") + assert len(results) == 2 + assert all(r["thread_id"] == "t1" for r in results) + await _cleanup() + + @pytest.mark.anyio + async def test_delete(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + created = await repo.create(run_id="r1", thread_id="t1", rating=1) + deleted = await repo.delete(created["feedback_id"]) + assert deleted is True + assert await repo.get(created["feedback_id"]) is None + await _cleanup() + + @pytest.mark.anyio + async def test_delete_nonexistent(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + deleted = await repo.delete("nonexistent") + assert deleted is False + await _cleanup() + + @pytest.mark.anyio + async def test_aggregate_by_run(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + await repo.create(run_id="r1", thread_id="t1", rating=1, user_id="user-1") + await repo.create(run_id="r1", thread_id="t1", rating=1, user_id="user-2") + await repo.create(run_id="r1", thread_id="t1", rating=-1, user_id="user-3") + stats = await repo.aggregate_by_run("t1", "r1") + assert stats["total"] == 3 + assert stats["positive"] == 2 + assert stats["negative"] == 1 + assert stats["run_id"] == "r1" + await _cleanup() + + @pytest.mark.anyio + async def test_aggregate_empty(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + stats = await repo.aggregate_by_run("t1", "r1") + assert stats["total"] == 0 + assert stats["positive"] == 0 + assert stats["negative"] == 0 + await _cleanup() + + @pytest.mark.anyio + async def test_upsert_creates_new(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + record = await repo.upsert(run_id="r1", thread_id="t1", rating=1, user_id="u1") + assert record["rating"] == 1 + assert record["feedback_id"] + assert record["user_id"] == "u1" + await _cleanup() + + @pytest.mark.anyio + 
async def test_upsert_updates_existing(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + first = await repo.upsert(run_id="r1", thread_id="t1", rating=1, user_id="u1") + second = await repo.upsert(run_id="r1", thread_id="t1", rating=-1, user_id="u1", comment="changed my mind") + assert second["feedback_id"] == first["feedback_id"] + assert second["rating"] == -1 + assert second["comment"] == "changed my mind" + await _cleanup() + + @pytest.mark.anyio + async def test_upsert_different_users_separate(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + r1 = await repo.upsert(run_id="r1", thread_id="t1", rating=1, user_id="u1") + r2 = await repo.upsert(run_id="r1", thread_id="t1", rating=-1, user_id="u2") + assert r1["feedback_id"] != r2["feedback_id"] + assert r1["rating"] == 1 + assert r2["rating"] == -1 + await _cleanup() + + @pytest.mark.anyio + async def test_upsert_invalid_rating(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + with pytest.raises(ValueError): + await repo.upsert(run_id="r1", thread_id="t1", rating=0, user_id="u1") + await _cleanup() + + @pytest.mark.anyio + async def test_delete_by_run(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + await repo.upsert(run_id="r1", thread_id="t1", rating=1, user_id="u1") + deleted = await repo.delete_by_run(thread_id="t1", run_id="r1", user_id="u1") + assert deleted is True + results = await repo.list_by_run("t1", "r1", user_id="u1") + assert len(results) == 0 + await _cleanup() + + @pytest.mark.anyio + async def test_delete_by_run_nonexistent(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + deleted = await repo.delete_by_run(thread_id="t1", run_id="r1", user_id="u1") + assert deleted is False + await _cleanup() + + @pytest.mark.anyio + async def test_list_by_thread_grouped(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + await repo.upsert(run_id="r1", thread_id="t1", rating=1, user_id="u1") + await repo.upsert(run_id="r2", thread_id="t1", rating=-1, user_id="u1") + await repo.upsert(run_id="r3", thread_id="t2", rating=1, user_id="u1") + grouped = await repo.list_by_thread_grouped("t1", user_id="u1") + assert "r1" in grouped + assert "r2" in grouped + assert "r3" not in grouped + assert grouped["r1"]["rating"] == 1 + assert grouped["r2"]["rating"] == -1 + await _cleanup() + + @pytest.mark.anyio + async def test_list_by_thread_grouped_empty(self, tmp_path): + repo = await _make_feedback_repo(tmp_path) + grouped = await repo.list_by_thread_grouped("t1", user_id="u1") + assert grouped == {} + await _cleanup() + + +# -- Follow-up association -- + + +class TestFollowUpAssociation: + @pytest.mark.anyio + async def test_run_records_follow_up_via_memory_store(self): + """MemoryRunStore stores follow_up_to_run_id in kwargs.""" + from deerflow.runtime.runs.store.memory import MemoryRunStore + + store = MemoryRunStore() + await store.put("r1", thread_id="t1", status="success") + # MemoryRunStore doesn't have follow_up_to_run_id as a top-level param, + # but it can be passed via metadata + await store.put("r2", thread_id="t1", metadata={"follow_up_to_run_id": "r1"}) + run = await store.get("r2") + assert run["metadata"]["follow_up_to_run_id"] == "r1" + + @pytest.mark.anyio + async def test_human_message_has_follow_up_metadata(self): + """human_message event metadata includes follow_up_to_run_id.""" + from deerflow.runtime.events.store.memory import MemoryRunEventStore + + event_store = MemoryRunEventStore() + await event_store.put( + thread_id="t1", + run_id="r2", + 
event_type="human_message", + category="message", + content="Tell me more about that", + metadata={"follow_up_to_run_id": "r1"}, + ) + messages = await event_store.list_messages("t1") + assert messages[0]["metadata"]["follow_up_to_run_id"] == "r1" + + @pytest.mark.anyio + async def test_follow_up_auto_detection_logic(self): + """Simulate the auto-detection: latest successful run becomes follow_up_to.""" + from deerflow.runtime.runs.store.memory import MemoryRunStore + + store = MemoryRunStore() + await store.put("r1", thread_id="t1", status="success") + await store.put("r2", thread_id="t1", status="error") + + # Auto-detect: list_by_thread returns newest first + recent = await store.list_by_thread("t1", limit=1) + follow_up = None + if recent and recent[0].get("status") == "success": + follow_up = recent[0]["run_id"] + # r2 (error) is newest, so no follow_up detected + assert follow_up is None + + # Now add a successful run + await store.put("r3", thread_id="t1", status="success") + recent = await store.list_by_thread("t1", limit=1) + follow_up = None + if recent and recent[0].get("status") == "success": + follow_up = recent[0]["run_id"] + assert follow_up == "r3" diff --git a/backend/tests/test_gateway_deps_config.py b/backend/tests/test_gateway_deps_config.py new file mode 100644 index 000000000..70f9124b6 --- /dev/null +++ b/backend/tests/test_gateway_deps_config.py @@ -0,0 +1,41 @@ +from __future__ import annotations + +from fastapi import Depends, FastAPI +from fastapi.testclient import TestClient + +from app.gateway.deps import get_config +from deerflow.config.app_config import AppConfig +from deerflow.config.sandbox_config import SandboxConfig + + +def test_get_config_returns_app_state_config(): + """get_config should return the exact AppConfig stored on app.state.""" + app = FastAPI() + config = AppConfig(sandbox=SandboxConfig(use="test")) + app.state.config = config + + @app.get("/probe") + def probe(cfg: AppConfig = Depends(get_config)): + return {"same_identity": cfg is config, "log_level": cfg.log_level} + + client = TestClient(app) + response = client.get("/probe") + + assert response.status_code == 200 + assert response.json() == {"same_identity": True, "log_level": "info"} + + +def test_get_config_reads_updated_app_state(): + """Swapping app.state.config should be visible to the dependency.""" + app = FastAPI() + app.state.config = AppConfig(sandbox=SandboxConfig(use="test"), log_level="info") + + @app.get("/log-level") + def log_level(cfg: AppConfig = Depends(get_config)): + return {"level": cfg.log_level} + + client = TestClient(app) + assert client.get("/log-level").json() == {"level": "info"} + + app.state.config = app.state.config.model_copy(update={"log_level": "debug"}) + assert client.get("/log-level").json() == {"level": "debug"} diff --git a/backend/tests/test_gateway_docs_toggle.py b/backend/tests/test_gateway_docs_toggle.py new file mode 100644 index 000000000..54392ee2e --- /dev/null +++ b/backend/tests/test_gateway_docs_toggle.py @@ -0,0 +1,124 @@ +"""Tests for GATEWAY_ENABLE_DOCS configuration toggle. + +Verifies that Swagger UI (/docs), ReDoc (/redoc), and the OpenAPI schema +(/openapi.json) can be disabled via the GATEWAY_ENABLE_DOCS environment +variable for production deployments. 
+""" + +from __future__ import annotations + +import os +from unittest.mock import patch + +import pytest +from fastapi.testclient import TestClient + + +def _reset_gateway_config(): + """Reset the cached gateway config so env changes take effect.""" + import app.gateway.config as cfg + + cfg._gateway_config = None + + +@pytest.fixture(autouse=True) +def _clean_config(): + """Ensure gateway config cache is cleared before and after each test.""" + _reset_gateway_config() + yield + _reset_gateway_config() + + +# --------------------------------------------------------------------------- +# Config parsing +# --------------------------------------------------------------------------- + + +def test_enable_docs_defaults_to_true(): + """When GATEWAY_ENABLE_DOCS is not set, enable_docs should be True.""" + with patch.dict(os.environ, {}, clear=False): + if "GATEWAY_ENABLE_DOCS" in os.environ: + del os.environ["GATEWAY_ENABLE_DOCS"] + _reset_gateway_config() + from app.gateway.config import get_gateway_config + + config = get_gateway_config() + assert config.enable_docs is True + + +def test_enable_docs_false(): + """GATEWAY_ENABLE_DOCS=false should disable docs.""" + with patch.dict(os.environ, {"GATEWAY_ENABLE_DOCS": "false"}): + _reset_gateway_config() + from app.gateway.config import get_gateway_config + + config = get_gateway_config() + assert config.enable_docs is False + + +def test_enable_docs_case_insensitive(): + """GATEWAY_ENABLE_DOCS is case-insensitive (FALSE, False, false).""" + for value in ("FALSE", "False", "false"): + with patch.dict(os.environ, {"GATEWAY_ENABLE_DOCS": value}): + _reset_gateway_config() + from app.gateway.config import get_gateway_config + + config = get_gateway_config() + assert config.enable_docs is False, f"Expected False for GATEWAY_ENABLE_DOCS={value}" + + +def test_enable_docs_unexpected_value_disables(): + """Any non-'true' value should disable docs (fail-closed).""" + for value in ("0", "no", "off", "anything"): + with patch.dict(os.environ, {"GATEWAY_ENABLE_DOCS": value}): + _reset_gateway_config() + from app.gateway.config import get_gateway_config + + config = get_gateway_config() + assert config.enable_docs is False, f"Expected False for GATEWAY_ENABLE_DOCS={value}" + + +# --------------------------------------------------------------------------- +# App-level endpoint visibility +# --------------------------------------------------------------------------- + + +def test_docs_endpoints_available_by_default(): + """With enable_docs=True (default), /docs, /redoc, /openapi.json return 200.""" + with patch.dict(os.environ, {}, clear=False): + if "GATEWAY_ENABLE_DOCS" in os.environ: + del os.environ["GATEWAY_ENABLE_DOCS"] + _reset_gateway_config() + from app.gateway.app import create_app + + app = create_app() + client = TestClient(app) + assert client.get("/docs").status_code == 200 + assert client.get("/redoc").status_code == 200 + assert client.get("/openapi.json").status_code == 200 + + +def test_docs_endpoints_disabled_when_false(): + """With GATEWAY_ENABLE_DOCS=false, /docs, /redoc, /openapi.json return 404.""" + with patch.dict(os.environ, {"GATEWAY_ENABLE_DOCS": "false"}): + _reset_gateway_config() + from app.gateway.app import create_app + + app = create_app() + client = TestClient(app) + assert client.get("/docs").status_code == 404 + assert client.get("/redoc").status_code == 404 + assert client.get("/openapi.json").status_code == 404 + + +def test_health_still_works_when_docs_disabled(): + """Disabling docs should NOT affect /health or other 
normal endpoints.""" + with patch.dict(os.environ, {"GATEWAY_ENABLE_DOCS": "false"}): + _reset_gateway_config() + from app.gateway.app import create_app + + app = create_app() + client = TestClient(app) + resp = client.get("/health") + assert resp.status_code == 200 + assert resp.json()["status"] == "healthy" diff --git a/backend/tests/test_gateway_lifespan_shutdown.py b/backend/tests/test_gateway_lifespan_shutdown.py new file mode 100644 index 000000000..9319c6268 --- /dev/null +++ b/backend/tests/test_gateway_lifespan_shutdown.py @@ -0,0 +1,68 @@ +"""Regression tests for Gateway lifespan shutdown. + +These tests guard the invariant that lifespan shutdown is *bounded*: a +misbehaving channel whose ``stop()`` blocks forever must not keep the +uvicorn worker alive. A hung worker is the precondition for the +signal-reentrancy deadlock described in +``app.gateway.app._SHUTDOWN_HOOK_TIMEOUT_SECONDS``. +""" + +from __future__ import annotations + +import asyncio +from contextlib import asynccontextmanager +from unittest.mock import MagicMock, patch + +from fastapi import FastAPI + + +@asynccontextmanager +async def _noop_langgraph_runtime(_app): + yield + + +async def _run_lifespan_with_hanging_stop() -> float: + """Drive the lifespan context with stop_channel_service hanging forever. + + Returns the elapsed wall-clock seconds. + """ + from app.gateway.app import _SHUTDOWN_HOOK_TIMEOUT_SECONDS, lifespan + + async def hang_forever() -> None: + await asyncio.sleep(3600) + + app = FastAPI() + + fake_service = MagicMock() + fake_service.get_status = MagicMock(return_value={}) + + async def fake_start(): + return fake_service + + with ( + patch("app.gateway.app.get_app_config"), + patch("app.gateway.app.get_gateway_config", return_value=MagicMock(host="x", port=0)), + patch("app.gateway.app.langgraph_runtime", _noop_langgraph_runtime), + patch("app.channels.service.start_channel_service", side_effect=fake_start), + patch("app.channels.service.stop_channel_service", side_effect=hang_forever), + ): + loop = asyncio.get_event_loop() + start = loop.time() + async with lifespan(app): + pass + elapsed = loop.time() - start + + assert _SHUTDOWN_HOOK_TIMEOUT_SECONDS < 30.0, "Timeout constant must stay modest" + return elapsed + + +def test_shutdown_is_bounded_when_channel_stop_hangs(): + """Lifespan exit must complete near the configured timeout, not hang.""" + from app.gateway.app import _SHUTDOWN_HOOK_TIMEOUT_SECONDS + + elapsed = asyncio.run(_run_lifespan_with_hanging_stop()) + + # Generous upper bound: timeout + 2s slack for scheduling overhead. + assert elapsed < _SHUTDOWN_HOOK_TIMEOUT_SECONDS + 2.0, f"Lifespan shutdown took {elapsed:.2f}s; expected <= {_SHUTDOWN_HOOK_TIMEOUT_SECONDS + 2.0:.1f}s" + # Lower bound: the wait_for should actually have waited. + assert elapsed >= _SHUTDOWN_HOOK_TIMEOUT_SECONDS - 0.5, f"Lifespan exited too quickly ({elapsed:.2f}s); wait_for may not have been invoked." 
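
Reviewer note (not part of the patch): the two test files above pin down behaviors whose implementations live in app.gateway.config and app.gateway.app and are not included in this diff. Two minimal sketches of the patterns the tests imply follow; function names and the timeout value are assumptions, not the project's actual code.

First, a fail-closed docs toggle, assuming the config reads GATEWAY_ENABLE_DOCS directly. Per the tests, only the literal "true" (any casing) enables docs, and FastAPI disables a page when its URL is set to None:

import os

from fastapi import FastAPI


def create_app_sketch() -> FastAPI:
    # Fail-closed: any value other than "true" (case-insensitive) disables docs.
    enable_docs = os.environ.get("GATEWAY_ENABLE_DOCS", "true").strip().lower() == "true"
    return FastAPI(
        docs_url="/docs" if enable_docs else None,
        redoc_url="/redoc" if enable_docs else None,
        openapi_url="/openapi.json" if enable_docs else None,
    )

Second, a bounded shutdown hook of the shape the lifespan tests guard. stop_channel_service and _SHUTDOWN_HOOK_TIMEOUT_SECONDS are names taken from the tests, but the wrapper itself (and the 10.0 value) is illustrative only:

import asyncio
import logging

logger = logging.getLogger(__name__)
_SHUTDOWN_HOOK_TIMEOUT_SECONDS = 10.0  # assumed value; the real constant lives in app.gateway.app


async def shutdown_channels(stop_channel_service) -> None:
    # Bound the hook so a stop() that blocks forever cannot keep the uvicorn
    # worker alive (the precondition for the signal-reentrancy deadlock noted
    # in the module docstring above).
    try:
        await asyncio.wait_for(stop_channel_service(), timeout=_SHUTDOWN_HOOK_TIMEOUT_SECONDS)
    except asyncio.TimeoutError:
        # A bounded, slightly lossy shutdown beats a hung worker.
        logger.warning("stop_channel_service exceeded %.1fs; abandoning hook", _SHUTDOWN_HOOK_TIMEOUT_SECONDS)
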
diff --git a/backend/tests/test_gateway_runtime_cleanup.py b/backend/tests/test_gateway_runtime_cleanup.py new file mode 100644 index 000000000..2cc184215 --- /dev/null +++ b/backend/tests/test_gateway_runtime_cleanup.py @@ -0,0 +1,62 @@ +"""Regression coverage for the Gateway-owned LangGraph API runtime.""" + +from __future__ import annotations + +import re +from pathlib import Path + +REPO_ROOT = Path(__file__).resolve().parents[2] + + +def _read(path: str) -> str: + return (REPO_ROOT / path).read_text(encoding="utf-8") + + +def test_root_makefile_no_longer_exposes_transition_gateway_targets(): + makefile = _read("Makefile") + + assert "dev-pro" not in makefile + assert "start-pro" not in makefile + assert "dev-daemon-pro" not in makefile + assert "start-daemon-pro" not in makefile + assert "docker-start-pro" not in makefile + assert "up-pro" not in makefile + assert not re.search(r"serve\.sh .*--gateway", makefile) + assert "docker.sh start --gateway" not in makefile + assert "deploy.sh --gateway" not in makefile + + +def test_service_launchers_always_use_gateway_runtime(): + operational_files = { + "scripts/serve.sh": _read("scripts/serve.sh"), + "scripts/docker.sh": _read("scripts/docker.sh"), + "scripts/deploy.sh": _read("scripts/deploy.sh"), + "docker/docker-compose-dev.yaml": _read("docker/docker-compose-dev.yaml"), + "docker/docker-compose.yaml": _read("docker/docker-compose.yaml"), + } + + for path, content in operational_files.items(): + assert "start --gateway" not in content, path + assert "deploy.sh --gateway" not in content, path + assert "langgraph dev" not in content, path + assert "LANGGRAPH_UPSTREAM" not in content, path + assert "LANGGRAPH_REWRITE" not in content, path + + +def test_nginx_routes_official_langgraph_prefix_to_gateway_api(): + for path in ("docker/nginx/nginx.local.conf", "docker/nginx/nginx.conf"): + content = _read(path) + + assert "/api/langgraph-compat" not in content + assert "proxy_pass http://langgraph" not in content + assert "rewrite ^/api/langgraph/(.*) /api/$1 break;" in content + assert "proxy_pass http://gateway" in content + + +def test_frontend_rewrites_langgraph_prefix_to_gateway(): + next_config = _read("frontend/next.config.js") + api_client = _read("frontend/src/core/api/api-client.ts") + + assert "DEER_FLOW_INTERNAL_LANGGRAPH_BASE_URL" not in next_config + assert "http://127.0.0.1:2024" not in next_config + assert "langgraph-compat" not in api_client diff --git a/backend/tests/test_gateway_services.py b/backend/tests/test_gateway_services.py index 782306e38..013991b82 100644 --- a/backend/tests/test_gateway_services.py +++ b/backend/tests/test_gateway_services.py @@ -145,6 +145,21 @@ def test_build_run_config_explicit_agent_name_not_overwritten(): assert config["configurable"]["agent_name"] == "explicit-agent" +def test_build_run_config_context_custom_agent_injects_agent_name(): + """Custom assistant_id must be forwarded as context['agent_name'] in context mode.""" + from app.gateway.services import build_run_config + + config = build_run_config( + "thread-1", + {"context": {"model_name": "deepseek-v3"}}, + None, + assistant_id="finalis", + ) + + assert config["context"]["agent_name"] == "finalis" + assert "configurable" not in config + + def test_resolve_agent_factory_returns_make_lead_agent(): """resolve_agent_factory always returns make_lead_agent regardless of assistant_id.""" from app.gateway.services import resolve_agent_factory @@ -241,6 +256,37 @@ def test_context_merges_into_configurable(): assert "thread_id" not in {k for k in 
context if k in _CONTEXT_CONFIGURABLE_KEYS} +def test_merge_run_context_overrides_propagates_to_runtime_context(): + """Regression for issue #2677: ``agent_name`` (and other whitelisted keys) from + ``body.context`` must be propagated into BOTH ``config['configurable']`` and + ``config['context']``. Previously only ``configurable`` was populated, so after + the LangGraph 1.1.x upgrade removed the fallback from ``configurable``, the + ``setup_agent`` tool read ``runtime.context`` with ``agent_name=None`` and + silently wrote SOUL.md to the global base_dir. + """ + from app.gateway.services import build_run_config, merge_run_context_overrides + + config = build_run_config("thread-1", None, None) + merge_run_context_overrides(config, {"agent_name": "my-agent", "is_bootstrap": True, "thread_id": "ignored"}) + + assert config["configurable"]["agent_name"] == "my-agent" + assert config["configurable"]["is_bootstrap"] is True + assert config["context"]["agent_name"] == "my-agent" + assert config["context"]["is_bootstrap"] is True + # Non-whitelisted keys are not forwarded. + assert "thread_id" not in config["context"] + + +def test_merge_run_context_overrides_noop_for_empty_context(): + from app.gateway.services import build_run_config, merge_run_context_overrides + + config = build_run_config("thread-1", None, None) + before = {k: dict(v) if isinstance(v, dict) else v for k, v in config.items()} + merge_run_context_overrides(config, None) + merge_run_context_overrides(config, {}) + assert config == before + + def test_context_does_not_override_existing_configurable(): """Values already in config.configurable must NOT be overridden by context.""" from app.gateway.services import build_run_config @@ -298,6 +344,36 @@ def test_build_run_config_with_context(): assert config["recursion_limit"] == 100 +def test_build_run_config_null_context_becomes_empty_context(): + """When caller sends context=null, treat it as an empty context object.""" + from app.gateway.services import build_run_config + + config = build_run_config("thread-1", {"context": None}, None) + + assert config["context"] == {} + assert "configurable" not in config + + +def test_build_run_config_rejects_non_mapping_context(): + """When caller sends a non-object context, raise a clear error instead of a TypeError.""" + import pytest + + from app.gateway.services import build_run_config + + with pytest.raises(ValueError, match="context"): + build_run_config("thread-1", {"context": "bad-context"}, None) + + +def test_build_run_config_null_context_custom_agent_injects_agent_name(): + """Custom assistant_id can still be injected when context=null starts context mode.""" + from app.gateway.services import build_run_config + + config = build_run_config("thread-1", {"context": None}, None, assistant_id="finalis") + + assert config["context"] == {"agent_name": "finalis"} + assert "configurable" not in config + + def test_build_run_config_context_plus_configurable_warns(caplog): """When caller sends both 'context' and 'configurable', prefer 'context' and log a warning.""" import logging diff --git a/backend/tests/test_initialize_admin.py b/backend/tests/test_initialize_admin.py new file mode 100644 index 000000000..26b2ec6b2 --- /dev/null +++ b/backend/tests/test_initialize_admin.py @@ -0,0 +1,182 @@ +"""Tests for the POST /api/v1/auth/initialize endpoint. + +Covers: first-boot admin creation, rejection when system already +initialized, password strength validation, +and public accessibility (no auth cookie required). 
+""" + +import asyncio +import os + +import pytest +from fastapi.testclient import TestClient + +os.environ.setdefault("AUTH_JWT_SECRET", "test-secret-key-initialize-admin-min-32") + +from app.gateway.auth.config import AuthConfig, set_auth_config + +_TEST_SECRET = "test-secret-key-initialize-admin-min-32" + + +@pytest.fixture(autouse=True) +def _setup_auth(tmp_path): + """Fresh SQLite engine + auth config per test.""" + from app.gateway import deps + from app.gateway.routers.auth import _SETUP_STATUS_COOLDOWN + from deerflow.persistence.engine import close_engine, init_engine + + set_auth_config(AuthConfig(jwt_secret=_TEST_SECRET)) + url = f"sqlite+aiosqlite:///{tmp_path}/init_admin.db" + asyncio.run(init_engine("sqlite", url=url, sqlite_dir=str(tmp_path))) + deps._cached_local_provider = None + deps._cached_repo = None + _SETUP_STATUS_COOLDOWN.clear() + try: + yield + finally: + deps._cached_local_provider = None + deps._cached_repo = None + _SETUP_STATUS_COOLDOWN.clear() + asyncio.run(close_engine()) + + +@pytest.fixture() +def client(_setup_auth): + from app.gateway.app import create_app + from app.gateway.auth.config import AuthConfig, set_auth_config + + set_auth_config(AuthConfig(jwt_secret=_TEST_SECRET)) + app = create_app() + # Do NOT use TestClient as a context manager — that would trigger the + # full lifespan which requires config.yaml. The auth endpoints work + # without the lifespan (persistence engine is set up by _setup_auth). + yield TestClient(app) + + +def _init_payload(**extra): + """Build a valid /initialize payload.""" + return { + "email": "admin@example.com", + "password": "Str0ng!Pass99", + **extra, + } + + +# ── Happy path ──────────────────────────────────────────────────────────── + + +def test_initialize_creates_admin_and_sets_cookie(client): + """POST /initialize when no admin exists → 201, session cookie set.""" + resp = client.post("/api/v1/auth/initialize", json=_init_payload()) + assert resp.status_code == 201 + data = resp.json() + assert data["email"] == "admin@example.com" + assert data["system_role"] == "admin" + assert "access_token" in resp.cookies + + +def test_initialize_needs_setup_false(client): + """Newly created admin via /initialize has needs_setup=False.""" + client.post("/api/v1/auth/initialize", json=_init_payload()) + me = client.get("/api/v1/auth/me") + assert me.status_code == 200 + assert me.json()["needs_setup"] is False + + +# ── Rejection when already initialized ─────────────────────────────────── + + +def test_initialize_rejected_when_admin_exists(client): + """Second call to /initialize after admin exists → 409 system_already_initialized.""" + client.post("/api/v1/auth/initialize", json=_init_payload()) + resp2 = client.post( + "/api/v1/auth/initialize", + json={**_init_payload(), "email": "other@example.com"}, + ) + assert resp2.status_code == 409 + body = resp2.json() + assert body["detail"]["code"] == "system_already_initialized" + + +def test_initialize_register_does_not_block_initialization(client): + """/register creating a user before /initialize doesn't block admin creation.""" + # Register a regular user first + client.post("/api/v1/auth/register", json={"email": "regular@example.com", "password": "Tr0ub4dor3a"}) + # /initialize should still succeed (checks admin_count, not total user_count) + resp = client.post("/api/v1/auth/initialize", json=_init_payload()) + assert resp.status_code == 201 + assert resp.json()["system_role"] == "admin" + + +# ── Endpoint is public (no cookie required) ─────────────────────────────── + 
+
+def test_initialize_accessible_without_cookie(client):
+    """No access_token cookie needed for /initialize."""
+    resp = client.post(
+        "/api/v1/auth/initialize",
+        json=_init_payload(),
+        cookies={},
+    )
+    assert resp.status_code == 201
+
+
+# ── Password validation ───────────────────────────────────────────────────
+
+
+def test_initialize_rejects_short_password(client):
+    """Password shorter than 8 chars → 422."""
+    resp = client.post(
+        "/api/v1/auth/initialize",
+        json={**_init_payload(), "password": "short"},
+    )
+    assert resp.status_code == 422
+
+
+def test_initialize_rejects_common_password(client):
+    """Common password → 422."""
+    resp = client.post(
+        "/api/v1/auth/initialize",
+        json={**_init_payload(), "password": "password123"},
+    )
+    assert resp.status_code == 422
+
+
+# ── setup-status reflects initialization ─────────────────────────────────
+
+
+def test_setup_status_before_initialization(client):
+    """setup-status returns needs_setup=True before /initialize is called."""
+    resp = client.get("/api/v1/auth/setup-status")
+    assert resp.status_code == 200
+    assert resp.json()["needs_setup"] is True
+
+
+def test_setup_status_after_initialization(client):
+    """setup-status returns needs_setup=False after /initialize succeeds."""
+    client.post("/api/v1/auth/initialize", json=_init_payload())
+    resp = client.get("/api/v1/auth/setup-status")
+    assert resp.status_code == 200
+    assert resp.json()["needs_setup"] is False
+
+
+def test_setup_status_true_when_only_regular_user_exists(client):
+    """setup-status returns needs_setup=True even when regular users exist (no admin)."""
+    client.post("/api/v1/auth/register", json={"email": "regular@example.com", "password": "Tr0ub4dor3a"})
+    resp = client.get("/api/v1/auth/setup-status")
+    assert resp.status_code == 200
+    assert resp.json()["needs_setup"] is True
+
+
+def test_setup_status_rate_limited_on_second_call(client):
+    """Second /setup-status call within the cooldown window returns 429 with Retry-After."""
+    # First call succeeds.
+    resp1 = client.get("/api/v1/auth/setup-status")
+    assert resp1.status_code == 200
+
+    # Immediate second call is rate-limited.
+ resp2 = client.get("/api/v1/auth/setup-status") + assert resp2.status_code == 429 + assert "Retry-After" in resp2.headers + retry_after = int(resp2.headers["Retry-After"]) + assert 1 <= retry_after <= 60 diff --git a/backend/tests/test_invoke_acp_agent_tool.py b/backend/tests/test_invoke_acp_agent_tool.py index 8063875cf..3c5f6f0ff 100644 --- a/backend/tests/test_invoke_acp_agent_tool.py +++ b/backend/tests/test_invoke_acp_agent_tool.py @@ -152,8 +152,10 @@ def test_get_work_dir_uses_base_dir_when_no_thread_id(monkeypatch, tmp_path): def test_get_work_dir_uses_per_thread_path_when_thread_id_given(monkeypatch, tmp_path): """P1.1: _get_work_dir(thread_id) uses {base_dir}/threads/{thread_id}/acp-workspace/.""" from deerflow.config import paths as paths_module + from deerflow.runtime import user_context as uc_module monkeypatch.setattr(paths_module, "get_paths", lambda: paths_module.Paths(base_dir=tmp_path)) + monkeypatch.setattr(uc_module, "get_effective_user_id", lambda: None) result = _get_work_dir("thread-abc-123") expected = tmp_path / "threads" / "thread-abc-123" / "acp-workspace" assert result == str(expected) @@ -310,8 +312,10 @@ async def test_invoke_acp_agent_uses_fixed_acp_workspace(monkeypatch, tmp_path): async def test_invoke_acp_agent_uses_per_thread_workspace_when_thread_id_in_config(monkeypatch, tmp_path): """P1.1: When thread_id is in the RunnableConfig, ACP agent uses per-thread workspace.""" from deerflow.config import paths as paths_module + from deerflow.runtime import user_context as uc_module monkeypatch.setattr(paths_module, "get_paths", lambda: paths_module.Paths(base_dir=tmp_path)) + monkeypatch.setattr(uc_module, "get_effective_user_id", lambda: None) monkeypatch.setattr( "deerflow.config.extensions_config.ExtensionsConfig.from_file", diff --git a/backend/tests/test_jina_client.py b/backend/tests/test_jina_client.py index 5a1d6f6fa..b1856e4ae 100644 --- a/backend/tests/test_jina_client.py +++ b/backend/tests/test_jina_client.py @@ -80,6 +80,28 @@ async def test_crawl_network_error(jina_client, monkeypatch): assert "failed" in result.lower() +@pytest.mark.anyio +async def test_crawl_transient_failure_logs_without_traceback(jina_client, monkeypatch, caplog): + """Transient network failures must log at WARNING without a traceback and include the exception type.""" + + async def mock_post(self, url, **kwargs): + raise httpx.ConnectTimeout("timed out") + + monkeypatch.setattr(httpx.AsyncClient, "post", mock_post) + + with caplog.at_level(logging.DEBUG, logger="deerflow.community.jina_ai.jina_client"): + result = await jina_client.crawl("https://example.com") + + jina_records = [r for r in caplog.records if r.name == "deerflow.community.jina_ai.jina_client"] + assert len(jina_records) == 1, f"expected exactly one log record, got {len(jina_records)}" + record = jina_records[0] + assert record.levelno == logging.WARNING, f"expected WARNING, got {record.levelname}" + assert record.exc_info is None, "transient failures must not attach a traceback" + assert "ConnectTimeout" in record.getMessage() + assert result.startswith("Error:") + assert "ConnectTimeout" in result + + @pytest.mark.anyio async def test_crawl_passes_headers(jina_client, monkeypatch): """Test that correct headers are sent.""" diff --git a/backend/tests/test_langgraph_auth.py b/backend/tests/test_langgraph_auth.py new file mode 100644 index 000000000..d2ee81051 --- /dev/null +++ b/backend/tests/test_langgraph_auth.py @@ -0,0 +1,312 @@ +"""Tests for LangGraph Server auth handler (langgraph_auth.py). 
+ +Validates that the LangGraph auth layer enforces the same rules as Gateway: + cookie → JWT decode → DB lookup → token_version check → owner filter +""" + +import asyncio +import os +from datetime import timedelta +from pathlib import Path +from types import SimpleNamespace +from unittest.mock import AsyncMock, patch +from uuid import uuid4 + +import pytest + +os.environ.setdefault("AUTH_JWT_SECRET", "test-secret-key-for-langgraph-auth-testing-min-32") + +from langgraph_sdk import Auth + +from app.gateway.auth.config import AuthConfig, set_auth_config +from app.gateway.auth.jwt import create_access_token, decode_token +from app.gateway.auth.models import User +from app.gateway.langgraph_auth import add_owner_filter, authenticate + +# ── Helpers ─────────────────────────────────────────────────────────────── + +_JWT_SECRET = "test-secret-key-for-langgraph-auth-testing-min-32" + + +@pytest.fixture(autouse=True) +def _setup_auth_config(): + set_auth_config(AuthConfig(jwt_secret=_JWT_SECRET)) + yield + set_auth_config(AuthConfig(jwt_secret=_JWT_SECRET)) + + +def _req(cookies=None, method="GET", headers=None): + return SimpleNamespace(cookies=cookies or {}, method=method, headers=headers or {}) + + +def _user(user_id=None, token_version=0): + return User(email="test@example.com", password_hash="fakehash", system_role="user", id=user_id or uuid4(), token_version=token_version) + + +def _mock_provider(user=None): + p = AsyncMock() + p.get_user = AsyncMock(return_value=user) + return p + + +# ── @auth.authenticate ─────────────────────────────────────────────────── + + +def test_no_cookie_raises_401(): + with pytest.raises(Auth.exceptions.HTTPException) as exc: + asyncio.run(authenticate(_req())) + assert exc.value.status_code == 401 + assert "Not authenticated" in str(exc.value.detail) + + +def test_invalid_jwt_raises_401(): + with pytest.raises(Auth.exceptions.HTTPException) as exc: + asyncio.run(authenticate(_req({"access_token": "garbage"}))) + assert exc.value.status_code == 401 + assert "Invalid token" in str(exc.value.detail) + + +def test_expired_jwt_raises_401(): + token = create_access_token("user-1", expires_delta=timedelta(seconds=-1)) + with pytest.raises(Auth.exceptions.HTTPException) as exc: + asyncio.run(authenticate(_req({"access_token": token}))) + assert exc.value.status_code == 401 + + +def test_user_not_found_raises_401(): + token = create_access_token("ghost") + with patch("app.gateway.langgraph_auth.get_local_provider", return_value=_mock_provider(None)): + with pytest.raises(Auth.exceptions.HTTPException) as exc: + asyncio.run(authenticate(_req({"access_token": token}))) + assert exc.value.status_code == 401 + assert "User not found" in str(exc.value.detail) + + +def test_token_version_mismatch_raises_401(): + user = _user(token_version=2) + token = create_access_token(str(user.id), token_version=1) + with patch("app.gateway.langgraph_auth.get_local_provider", return_value=_mock_provider(user)): + with pytest.raises(Auth.exceptions.HTTPException) as exc: + asyncio.run(authenticate(_req({"access_token": token}))) + assert exc.value.status_code == 401 + assert "revoked" in str(exc.value.detail).lower() + + +def test_valid_token_returns_user_id(): + user = _user(token_version=0) + token = create_access_token(str(user.id), token_version=0) + with patch("app.gateway.langgraph_auth.get_local_provider", return_value=_mock_provider(user)): + result = asyncio.run(authenticate(_req({"access_token": token}))) + assert result == str(user.id) + + +def 
test_valid_token_matching_version(): + user = _user(token_version=5) + token = create_access_token(str(user.id), token_version=5) + with patch("app.gateway.langgraph_auth.get_local_provider", return_value=_mock_provider(user)): + result = asyncio.run(authenticate(_req({"access_token": token}))) + assert result == str(user.id) + + +# ── @auth.authenticate edge cases ──────────────────────────────────────── + + +def test_provider_exception_propagates(): + """Provider raises → should not be swallowed silently.""" + token = create_access_token("user-1") + p = AsyncMock() + p.get_user = AsyncMock(side_effect=RuntimeError("DB down")) + with patch("app.gateway.langgraph_auth.get_local_provider", return_value=p): + with pytest.raises(RuntimeError, match="DB down"): + asyncio.run(authenticate(_req({"access_token": token}))) + + +def test_jwt_missing_ver_defaults_to_zero(): + """JWT without 'ver' claim → decoded as ver=0, matches user with token_version=0.""" + import jwt as pyjwt + + uid = str(uuid4()) + raw = pyjwt.encode({"sub": uid, "exp": 9999999999, "iat": 1000000000}, _JWT_SECRET, algorithm="HS256") + user = _user(user_id=uid, token_version=0) + with patch("app.gateway.langgraph_auth.get_local_provider", return_value=_mock_provider(user)): + result = asyncio.run(authenticate(_req({"access_token": raw}))) + assert result == uid + + +def test_jwt_missing_ver_rejected_when_user_version_nonzero(): + """JWT without 'ver' (defaults 0) vs user with token_version=1 → 401.""" + import jwt as pyjwt + + uid = str(uuid4()) + raw = pyjwt.encode({"sub": uid, "exp": 9999999999, "iat": 1000000000}, _JWT_SECRET, algorithm="HS256") + user = _user(user_id=uid, token_version=1) + with patch("app.gateway.langgraph_auth.get_local_provider", return_value=_mock_provider(user)): + with pytest.raises(Auth.exceptions.HTTPException) as exc: + asyncio.run(authenticate(_req({"access_token": raw}))) + assert exc.value.status_code == 401 + + +def test_wrong_secret_raises_401(): + """Token signed with different secret → 401.""" + import jwt as pyjwt + + raw = pyjwt.encode({"sub": "user-1", "exp": 9999999999, "ver": 0}, "wrong-secret-that-is-long-enough-32chars!", algorithm="HS256") + with pytest.raises(Auth.exceptions.HTTPException) as exc: + asyncio.run(authenticate(_req({"access_token": raw}))) + assert exc.value.status_code == 401 + + +# ── @auth.on (owner filter) ────────────────────────────────────────────── + + +class _FakeUser: + """Minimal BaseUser-compatible object without langgraph_api.config dependency.""" + + def __init__(self, identity: str): + self.identity = identity + self.is_authenticated = True + self.display_name = identity + + +def _make_ctx(user_id): + return Auth.types.AuthContext(resource="threads", action="create", user=_FakeUser(user_id), permissions=[]) + + +def test_filter_injects_user_id(): + value = {} + asyncio.run(add_owner_filter(_make_ctx("user-a"), value)) + assert value["metadata"]["user_id"] == "user-a" + + +def test_filter_preserves_existing_metadata(): + value = {"metadata": {"title": "hello"}} + asyncio.run(add_owner_filter(_make_ctx("user-a"), value)) + assert value["metadata"]["user_id"] == "user-a" + assert value["metadata"]["title"] == "hello" + + +def test_filter_returns_user_id_dict(): + result = asyncio.run(add_owner_filter(_make_ctx("user-x"), {})) + assert result == {"user_id": "user-x"} + + +def test_filter_read_write_consistency(): + value = {} + filter_dict = asyncio.run(add_owner_filter(_make_ctx("user-1"), value)) + assert value["metadata"]["user_id"] == 
filter_dict["user_id"] + + +def test_different_users_different_filters(): + f_a = asyncio.run(add_owner_filter(_make_ctx("a"), {})) + f_b = asyncio.run(add_owner_filter(_make_ctx("b"), {})) + assert f_a["user_id"] != f_b["user_id"] + + +def test_filter_overrides_conflicting_user_id(): + """If value already has a different user_id in metadata, it gets overwritten.""" + value = {"metadata": {"user_id": "attacker"}} + asyncio.run(add_owner_filter(_make_ctx("real-owner"), value)) + assert value["metadata"]["user_id"] == "real-owner" + + +def test_filter_with_empty_metadata(): + """Explicit empty metadata dict is fine.""" + value = {"metadata": {}} + result = asyncio.run(add_owner_filter(_make_ctx("user-z"), value)) + assert value["metadata"]["user_id"] == "user-z" + assert result == {"user_id": "user-z"} + + +# ── Gateway parity ─────────────────────────────────────────────────────── + + +def test_shared_jwt_secret(): + token = create_access_token("user-1", token_version=3) + payload = decode_token(token) + from app.gateway.auth.errors import TokenError + + assert not isinstance(payload, TokenError) + assert payload.sub == "user-1" + assert payload.ver == 3 + + +def test_langgraph_json_has_auth_path(): + import json + + config = json.loads((Path(__file__).parent.parent / "langgraph.json").read_text()) + assert "auth" in config + assert "langgraph_auth" in config["auth"]["path"] + + +def test_auth_handler_has_both_layers(): + from app.gateway.langgraph_auth import auth + + assert auth._authenticate_handler is not None + assert len(auth._global_handlers) == 1 + + +# ── CSRF in LangGraph auth ────────────────────────────────────────────── + + +def test_csrf_get_no_check(): + """GET requests skip CSRF — should proceed to JWT validation.""" + with pytest.raises(Auth.exceptions.HTTPException) as exc: + asyncio.run(authenticate(_req(method="GET"))) + # Rejected by missing cookie, NOT by CSRF + assert exc.value.status_code == 401 + assert "Not authenticated" in str(exc.value.detail) + + +def test_csrf_post_missing_token(): + """POST without CSRF token → 403.""" + with pytest.raises(Auth.exceptions.HTTPException) as exc: + asyncio.run(authenticate(_req(method="POST", cookies={"access_token": "some-jwt"}))) + assert exc.value.status_code == 403 + assert "CSRF token missing" in str(exc.value.detail) + + +def test_csrf_post_mismatched_token(): + """POST with mismatched CSRF tokens → 403.""" + with pytest.raises(Auth.exceptions.HTTPException) as exc: + asyncio.run( + authenticate( + _req( + method="POST", + cookies={"access_token": "some-jwt", "csrf_token": "real-token"}, + headers={"x-csrf-token": "wrong-token"}, + ) + ) + ) + assert exc.value.status_code == 403 + assert "mismatch" in str(exc.value.detail) + + +def test_csrf_post_matching_token_proceeds_to_jwt(): + """POST with matching CSRF tokens passes CSRF check, then fails on JWT.""" + with pytest.raises(Auth.exceptions.HTTPException) as exc: + asyncio.run( + authenticate( + _req( + method="POST", + cookies={"access_token": "garbage", "csrf_token": "same-token"}, + headers={"x-csrf-token": "same-token"}, + ) + ) + ) + # Past CSRF, rejected by JWT decode + assert exc.value.status_code == 401 + assert "Invalid token" in str(exc.value.detail) + + +def test_csrf_put_requires_token(): + """PUT also requires CSRF.""" + with pytest.raises(Auth.exceptions.HTTPException) as exc: + asyncio.run(authenticate(_req(method="PUT", cookies={"access_token": "jwt"}))) + assert exc.value.status_code == 403 + + +def test_csrf_delete_requires_token(): + """DELETE also 
requires CSRF.""" + with pytest.raises(Auth.exceptions.HTTPException) as exc: + asyncio.run(authenticate(_req(method="DELETE", cookies={"access_token": "jwt"}))) + assert exc.value.status_code == 403 diff --git a/backend/tests/test_lead_agent_model_resolution.py b/backend/tests/test_lead_agent_model_resolution.py index 12a4d0143..c22377b88 100644 --- a/backend/tests/test_lead_agent_model_resolution.py +++ b/backend/tests/test_lead_agent_model_resolution.py @@ -2,6 +2,7 @@ from __future__ import annotations +import inspect from unittest.mock import MagicMock import pytest @@ -33,6 +34,44 @@ def _make_model(name: str, *, supports_thinking: bool) -> ModelConfig: ) +def test_make_lead_agent_signature_matches_langgraph_server_factory_abi(): + assert list(inspect.signature(lead_agent_module.make_lead_agent).parameters) == ["config"] + + +def test_internal_make_lead_agent_uses_explicit_app_config(monkeypatch): + app_config = _make_app_config([_make_model("explicit-model", supports_thinking=False)]) + + import deerflow.tools as tools_module + + def _raise_get_app_config(): + raise AssertionError("ambient get_app_config() must not be used when app_config is explicit") + + monkeypatch.setattr(lead_agent_module, "get_app_config", _raise_get_app_config) + monkeypatch.setattr(tools_module, "get_available_tools", lambda **kwargs: []) + monkeypatch.setattr(lead_agent_module, "_build_middlewares", lambda config, model_name, agent_name=None, **kwargs: []) + + captured: dict[str, object] = {} + + def _fake_create_chat_model(*, name, thinking_enabled, reasoning_effort=None, app_config=None): + captured["name"] = name + captured["app_config"] = app_config + return object() + + monkeypatch.setattr(lead_agent_module, "create_chat_model", _fake_create_chat_model) + monkeypatch.setattr(lead_agent_module, "create_agent", lambda **kwargs: kwargs) + + result = lead_agent_module._make_lead_agent( + {"configurable": {"model_name": "explicit-model"}}, + app_config=app_config, + ) + + assert captured == { + "name": "explicit-model", + "app_config": app_config, + } + assert result["model"] is not None + + def test_resolve_model_name_falls_back_to_default(monkeypatch, caplog): app_config = _make_app_config( [ @@ -84,14 +123,15 @@ def test_make_lead_agent_disables_thinking_when_model_does_not_support_it(monkey monkeypatch.setattr(lead_agent_module, "get_app_config", lambda: app_config) monkeypatch.setattr(tools_module, "get_available_tools", lambda **kwargs: []) - monkeypatch.setattr(lead_agent_module, "_build_middlewares", lambda config, model_name, agent_name=None: []) + monkeypatch.setattr(lead_agent_module, "_build_middlewares", lambda config, model_name, agent_name=None, **kwargs: []) captured: dict[str, object] = {} - def _fake_create_chat_model(*, name, thinking_enabled, reasoning_effort=None): + def _fake_create_chat_model(*, name, thinking_enabled, reasoning_effort=None, app_config=None): captured["name"] = name captured["thinking_enabled"] = thinking_enabled captured["reasoning_effort"] = reasoning_effort + captured["app_config"] = app_config return object() monkeypatch.setattr(lead_agent_module, "create_chat_model", _fake_create_chat_model) @@ -110,6 +150,57 @@ def test_make_lead_agent_disables_thinking_when_model_does_not_support_it(monkey assert captured["name"] == "safe-model" assert captured["thinking_enabled"] is False + assert captured["app_config"] is app_config + assert result["model"] is not None + + +def test_make_lead_agent_reads_runtime_options_from_context(monkeypatch): + app_config = 
_make_app_config( + [ + _make_model("default-model", supports_thinking=False), + _make_model("context-model", supports_thinking=True), + ] + ) + + import deerflow.tools as tools_module + + get_available_tools = MagicMock(return_value=[]) + monkeypatch.setattr(lead_agent_module, "get_app_config", lambda: app_config) + monkeypatch.setattr(tools_module, "get_available_tools", get_available_tools) + monkeypatch.setattr(lead_agent_module, "_build_middlewares", lambda config, model_name, agent_name=None, **kwargs: []) + + captured: dict[str, object] = {} + + def _fake_create_chat_model(*, name, thinking_enabled, reasoning_effort=None, app_config=None): + captured["name"] = name + captured["thinking_enabled"] = thinking_enabled + captured["reasoning_effort"] = reasoning_effort + captured["app_config"] = app_config + return object() + + monkeypatch.setattr(lead_agent_module, "create_chat_model", _fake_create_chat_model) + monkeypatch.setattr(lead_agent_module, "create_agent", lambda **kwargs: kwargs) + + result = lead_agent_module.make_lead_agent( + { + "context": { + "model_name": "context-model", + "thinking_enabled": False, + "reasoning_effort": "high", + "is_plan_mode": True, + "subagent_enabled": True, + "max_concurrent_subagents": 7, + } + } + ) + + assert captured == { + "name": "context-model", + "thinking_enabled": False, + "reasoning_effort": "high", + "app_config": app_config, + } + get_available_tools.assert_called_once_with(model_name="context-model", groups=None, subagent_enabled=True, app_config=app_config) assert result["model"] is not None @@ -150,16 +241,55 @@ def test_build_middlewares_uses_resolved_model_name_for_vision(monkeypatch): ) monkeypatch.setattr(lead_agent_module, "get_app_config", lambda: app_config) - monkeypatch.setattr(lead_agent_module, "_create_summarization_middleware", lambda: None) + monkeypatch.setattr(lead_agent_module, "_create_summarization_middleware", lambda **kwargs: None) monkeypatch.setattr(lead_agent_module, "_create_todo_list_middleware", lambda is_plan_mode: None) - middlewares = lead_agent_module._build_middlewares({"configurable": {"model_name": "stale-model", "is_plan_mode": False, "subagent_enabled": False}}, model_name="vision-model", custom_middlewares=[MagicMock()]) + middlewares = lead_agent_module._build_middlewares( + {"configurable": {"model_name": "stale-model", "is_plan_mode": False, "subagent_enabled": False}}, + model_name="vision-model", + custom_middlewares=[MagicMock()], + app_config=app_config, + ) assert any(isinstance(m, lead_agent_module.ViewImageMiddleware) for m in middlewares) # verify the custom middleware is injected correctly assert len(middlewares) > 0 and isinstance(middlewares[-2], MagicMock) +def test_build_middlewares_passes_explicit_app_config_to_shared_factory(monkeypatch): + app_config = _make_app_config([_make_model("safe-model", supports_thinking=False)]) + captured: dict[str, object] = {} + + def _raise_get_app_config(): + raise AssertionError("ambient get_app_config() must not be used when app_config is explicit") + + def _fake_build_lead_runtime_middlewares(*, app_config, lazy_init): + captured["app_config"] = app_config + captured["lazy_init"] = lazy_init + return ["base-middleware"] + + monkeypatch.setattr(lead_agent_module, "get_app_config", _raise_get_app_config) + monkeypatch.setattr( + lead_agent_module, + "build_lead_runtime_middlewares", + _fake_build_lead_runtime_middlewares, + ) + monkeypatch.setattr(lead_agent_module, "_create_summarization_middleware", lambda **kwargs: None) + 
monkeypatch.setattr(lead_agent_module, "_create_todo_list_middleware", lambda is_plan_mode: None) + + middlewares = lead_agent_module._build_middlewares( + {"configurable": {"is_plan_mode": False, "subagent_enabled": False}}, + model_name="safe-model", + app_config=app_config, + ) + + assert captured == { + "app_config": app_config, + "lazy_init": True, + } + assert middlewares[0] == "base-middleware" + + def test_create_summarization_middleware_uses_configured_model_alias(monkeypatch): monkeypatch.setattr( lead_agent_module, @@ -168,42 +298,26 @@ def test_create_summarization_middleware_uses_configured_model_alias(monkeypatch ) monkeypatch.setattr(lead_agent_module, "get_memory_config", lambda: MemoryConfig(enabled=False)) - captured: dict[str, object] = {} - fake_model = object() + from unittest.mock import MagicMock - def _fake_create_chat_model(*, name=None, thinking_enabled, reasoning_effort=None): + captured: dict[str, object] = {} + fake_model = MagicMock() + fake_model.with_config.return_value = fake_model + + def _fake_create_chat_model(*, name=None, thinking_enabled, reasoning_effort=None, app_config=None): captured["name"] = name captured["thinking_enabled"] = thinking_enabled captured["reasoning_effort"] = reasoning_effort + captured["app_config"] = app_config return fake_model monkeypatch.setattr(lead_agent_module, "create_chat_model", _fake_create_chat_model) monkeypatch.setattr(lead_agent_module, "DeerFlowSummarizationMiddleware", lambda **kwargs: kwargs) - middleware = lead_agent_module._create_summarization_middleware() + middleware = lead_agent_module._create_summarization_middleware(app_config=_make_app_config([_make_model("model-masswork", supports_thinking=False)])) assert captured["name"] == "model-masswork" assert captured["thinking_enabled"] is False + assert captured["app_config"] is not None assert middleware["model"] is fake_model - - -def test_create_summarization_middleware_registers_memory_flush_hook_when_memory_enabled(monkeypatch): - monkeypatch.setattr( - lead_agent_module, - "get_summarization_config", - lambda: SummarizationConfig(enabled=True), - ) - monkeypatch.setattr(lead_agent_module, "get_memory_config", lambda: MemoryConfig(enabled=True)) - monkeypatch.setattr(lead_agent_module, "create_chat_model", lambda **kwargs: object()) - - captured: dict[str, object] = {} - - def _fake_middleware(**kwargs): - captured.update(kwargs) - return kwargs - - monkeypatch.setattr(lead_agent_module, "DeerFlowSummarizationMiddleware", _fake_middleware) - - lead_agent_module._create_summarization_middleware() - - assert captured["before_summarization"] == [lead_agent_module.memory_flush_hook] + fake_model.with_config.assert_called_once_with(tags=["middleware:summarize"]) diff --git a/backend/tests/test_lead_agent_prompt.py b/backend/tests/test_lead_agent_prompt.py index 6817e7678..edbcd5193 100644 --- a/backend/tests/test_lead_agent_prompt.py +++ b/backend/tests/test_lead_agent_prompt.py @@ -7,6 +7,15 @@ from deerflow.agents.lead_agent import prompt as prompt_module from deerflow.skills.types import Skill +def _set_skills_cache_state(*, skills=None, active=False, version=0): + prompt_module._get_cached_skills_prompt_section.cache_clear() + with prompt_module._enabled_skills_lock: + prompt_module._enabled_skills_cache = skills + prompt_module._enabled_skills_refresh_active = active + prompt_module._enabled_skills_refresh_version = version + prompt_module._enabled_skills_refresh_event.clear() + + def 
test_build_custom_mounts_section_returns_empty_when_no_mounts(monkeypatch): config = SimpleNamespace(sandbox=SimpleNamespace(mounts=[])) monkeypatch.setattr("deerflow.config.get_app_config", lambda: config) @@ -39,7 +48,7 @@ def test_apply_prompt_template_includes_custom_mounts(monkeypatch): ) monkeypatch.setattr("deerflow.config.get_app_config", lambda: config) monkeypatch.setattr(prompt_module, "_get_enabled_skills", lambda: []) - monkeypatch.setattr(prompt_module, "get_deferred_tools_prompt_section", lambda: "") + monkeypatch.setattr(prompt_module, "get_deferred_tools_prompt_section", lambda **kwargs: "") monkeypatch.setattr(prompt_module, "_build_acp_section", lambda: "") monkeypatch.setattr(prompt_module, "_get_memory_context", lambda agent_name=None: "") monkeypatch.setattr(prompt_module, "get_agent_soul", lambda agent_name=None: "") @@ -57,7 +66,7 @@ def test_apply_prompt_template_includes_relative_path_guidance(monkeypatch): ) monkeypatch.setattr("deerflow.config.get_app_config", lambda: config) monkeypatch.setattr(prompt_module, "_get_enabled_skills", lambda: []) - monkeypatch.setattr(prompt_module, "get_deferred_tools_prompt_section", lambda: "") + monkeypatch.setattr(prompt_module, "get_deferred_tools_prompt_section", lambda **kwargs: "") monkeypatch.setattr(prompt_module, "_build_acp_section", lambda: "") monkeypatch.setattr(prompt_module, "_get_memory_context", lambda agent_name=None: "") monkeypatch.setattr(prompt_module, "get_agent_soul", lambda agent_name=None: "") @@ -83,8 +92,8 @@ def test_refresh_skills_system_prompt_cache_async_reloads_immediately(monkeypatc ) state = {"skills": [make_skill("first-skill")]} - monkeypatch.setattr(prompt_module, "load_skills", lambda enabled_only=True: list(state["skills"])) - prompt_module._reset_skills_system_prompt_cache_state() + monkeypatch.setattr(prompt_module, "get_or_new_skill_storage", lambda **kwargs: __import__("types").SimpleNamespace(load_skills=lambda *, enabled_only: list(state["skills"]))) + _set_skills_cache_state() try: prompt_module.warm_enabled_skills_cache() @@ -95,7 +104,7 @@ def test_refresh_skills_system_prompt_cache_async_reloads_immediately(monkeypatc assert [skill.name for skill in prompt_module._get_enabled_skills()] == ["second-skill"] finally: - prompt_module._reset_skills_system_prompt_cache_state() + _set_skills_cache_state() def test_clear_cache_does_not_spawn_parallel_refresh_workers(monkeypatch, tmp_path): @@ -136,8 +145,8 @@ def test_clear_cache_does_not_spawn_parallel_refresh_workers(monkeypatch, tmp_pa return [make_skill(f"skill-{current_call}")] - monkeypatch.setattr(prompt_module, "load_skills", fake_load_skills) - prompt_module._reset_skills_system_prompt_cache_state() + monkeypatch.setattr(prompt_module, "get_or_new_skill_storage", lambda **kwargs: __import__("types").SimpleNamespace(load_skills=lambda *, enabled_only: fake_load_skills(enabled_only=enabled_only))) + _set_skills_cache_state() try: prompt_module.clear_skills_system_prompt_cache() @@ -151,7 +160,7 @@ def test_clear_cache_does_not_spawn_parallel_refresh_workers(monkeypatch, tmp_pa assert [skill.name for skill in prompt_module._get_enabled_skills()] == ["skill-2"] finally: release.set() - prompt_module._reset_skills_system_prompt_cache_state() + _set_skills_cache_state() def test_warm_enabled_skills_cache_logs_on_timeout(monkeypatch, caplog): diff --git a/backend/tests/test_lead_agent_skills.py b/backend/tests/test_lead_agent_skills.py index 441dbeee2..fe983d916 100644 --- a/backend/tests/test_lead_agent_skills.py +++ 
b/backend/tests/test_lead_agent_skills.py @@ -100,6 +100,24 @@ def test_get_skills_prompt_section_cache_respects_skill_evolution_toggle(monkeyp assert "Skill Self-Evolution" not in disabled_result +def test_get_skills_prompt_section_uses_explicit_config_for_enabled_skills(monkeypatch): + explicit_config = SimpleNamespace( + skills=SimpleNamespace(container_path="/mnt/alt-skills"), + skill_evolution=SimpleNamespace(enabled=False), + ) + + monkeypatch.setattr("deerflow.agents.lead_agent.prompt._get_enabled_skills", lambda: [_make_skill("global-skill")]) + monkeypatch.setattr( + "deerflow.agents.lead_agent.prompt.get_or_new_skill_storage", + lambda app_config=None, **kwargs: __import__("types").SimpleNamespace(load_skills=lambda *, enabled_only: [_make_skill("explicit-skill")] if app_config is explicit_config else []), + ) + + result = get_skills_prompt_section(app_config=explicit_config) + + assert "explicit-skill" in result + assert "global-skill" not in result + + def test_make_lead_agent_empty_skills_passed_correctly(monkeypatch): from unittest.mock import MagicMock @@ -107,7 +125,7 @@ def test_make_lead_agent_empty_skills_passed_correctly(monkeypatch): # Mock dependencies monkeypatch.setattr(lead_agent_module, "get_app_config", lambda: MagicMock()) - monkeypatch.setattr(lead_agent_module, "_resolve_model_name", lambda x=None: "default-model") + monkeypatch.setattr(lead_agent_module, "_resolve_model_name", lambda x=None, **kwargs: "default-model") monkeypatch.setattr(lead_agent_module, "create_chat_model", lambda **kwargs: "model") monkeypatch.setattr("deerflow.tools.get_available_tools", lambda **kwargs: []) monkeypatch.setattr(lead_agent_module, "_build_middlewares", lambda *args, **kwargs: []) diff --git a/backend/tests/test_llm_error_handling_middleware.py b/backend/tests/test_llm_error_handling_middleware.py index 62ca243fd..1ab395cd1 100644 --- a/backend/tests/test_llm_error_handling_middleware.py +++ b/backend/tests/test_llm_error_handling_middleware.py @@ -11,6 +11,13 @@ from langgraph.errors import GraphBubbleUp from deerflow.agents.middlewares.llm_error_handling_middleware import ( LLMErrorHandlingMiddleware, ) +from deerflow.config.app_config import AppConfig +from deerflow.config.sandbox_config import SandboxConfig + + +def _make_app_config() -> AppConfig: + """Minimal AppConfig for middleware tests; circuit_breaker uses defaults.""" + return AppConfig(sandbox=SandboxConfig(use="test")) class FakeError(Exception): @@ -31,7 +38,7 @@ class FakeError(Exception): def _build_middleware(**attrs: int) -> LLMErrorHandlingMiddleware: - middleware = LLMErrorHandlingMiddleware() + middleware = LLMErrorHandlingMiddleware(app_config=_make_app_config()) for key, value in attrs.items(): setattr(middleware, key, value) return middleware @@ -226,9 +233,7 @@ def test_circuit_breaker_trips_and_recovers(monkeypatch: pytest.MonkeyPatch) -> current_time = 1000.0 monkeypatch.setattr("time.time", lambda: current_time) - middleware = LLMErrorHandlingMiddleware() - middleware.circuit_failure_threshold = 3 - middleware.circuit_recovery_timeout_sec = 10 + middleware = _build_middleware(circuit_failure_threshold=3, circuit_recovery_timeout_sec=10) monkeypatch.setattr(middleware, "_classify_error", mock_classify_retriable) request: Any = {"messages": []} @@ -284,8 +289,7 @@ def test_circuit_breaker_does_not_trip_on_non_retriable_errors(monkeypatch: pyte waits: list[float] = [] monkeypatch.setattr("time.sleep", lambda d: waits.append(d)) - middleware = LLMErrorHandlingMiddleware() - 
middleware.circuit_failure_threshold = 3 + middleware = _build_middleware(circuit_failure_threshold=3) monkeypatch.setattr(middleware, "_classify_error", mock_classify_non_retriable) request: Any = {"messages": []} @@ -386,9 +390,7 @@ async def test_async_circuit_breaker_trips_and_recovers(monkeypatch: pytest.Monk current_time = 1000.0 monkeypatch.setattr("time.time", lambda: current_time) - middleware = LLMErrorHandlingMiddleware() - middleware.circuit_failure_threshold = 3 - middleware.circuit_recovery_timeout_sec = 10 + middleware = _build_middleware(circuit_failure_threshold=3, circuit_recovery_timeout_sec=10) monkeypatch.setattr(middleware, "_classify_error", mock_classify_retriable) async def async_failing_handler(request: Any) -> Any: diff --git a/backend/tests/test_local_sandbox_provider_mounts.py b/backend/tests/test_local_sandbox_provider_mounts.py index 18e180e3b..5c50a1aa0 100644 --- a/backend/tests/test_local_sandbox_provider_mounts.py +++ b/backend/tests/test_local_sandbox_provider_mounts.py @@ -1,4 +1,5 @@ import errno +from pathlib import Path from types import SimpleNamespace from unittest.mock import patch @@ -8,6 +9,13 @@ from deerflow.sandbox.local.local_sandbox import LocalSandbox, PathMapping from deerflow.sandbox.local.local_sandbox_provider import LocalSandboxProvider +def _symlink_to(target, link, *, target_is_directory=False): + try: + link.symlink_to(target, target_is_directory=target_is_directory) + except (NotImplementedError, OSError) as exc: + pytest.skip(f"symlinks are not available: {exc}") + + class TestPathMapping: def test_path_mapping_dataclass(self): mapping = PathMapping(container_path="/mnt/skills", local_path="/home/user/skills", read_only=True) @@ -29,7 +37,7 @@ class TestLocalSandboxPathResolution: ], ) resolved = sandbox._resolve_path("/mnt/skills") - assert resolved == "/home/user/skills" + assert resolved == str(Path("/home/user/skills").resolve()) def test_resolve_path_nested_path(self): sandbox = LocalSandbox( @@ -39,7 +47,7 @@ class TestLocalSandboxPathResolution: ], ) resolved = sandbox._resolve_path("/mnt/skills/agent/prompt.py") - assert resolved == "/home/user/skills/agent/prompt.py" + assert resolved == str(Path("/home/user/skills/agent/prompt.py").resolve()) def test_resolve_path_no_mapping(self): sandbox = LocalSandbox( @@ -61,7 +69,7 @@ class TestLocalSandboxPathResolution: ) resolved = sandbox._resolve_path("/mnt/skills/file.py") # Should match /mnt/skills first (longer prefix) - assert resolved == "/home/user/skills/file.py" + assert resolved == str(Path("/home/user/skills/file.py").resolve()) def test_reverse_resolve_path_exact_match(self, tmp_path): skills_dir = tmp_path / "skills" @@ -175,6 +183,157 @@ class TestReadOnlyPath: assert exc_info.value.errno == errno.EROFS +class TestSymlinkEscapes: + def test_read_file_blocks_symlink_escape_from_mount(self, tmp_path): + mount_dir = tmp_path / "mount" + mount_dir.mkdir() + outside_dir = tmp_path / "outside" + outside_dir.mkdir() + (outside_dir / "secret.txt").write_text("secret") + _symlink_to(outside_dir, mount_dir / "escape", target_is_directory=True) + + sandbox = LocalSandbox( + "test", + [ + PathMapping(container_path="/mnt/data", local_path=str(mount_dir), read_only=False), + ], + ) + + with pytest.raises(PermissionError) as exc_info: + sandbox.read_file("/mnt/data/escape/secret.txt") + + assert exc_info.value.errno == errno.EACCES + + def test_write_file_blocks_symlink_escape_from_mount(self, tmp_path): + mount_dir = tmp_path / "mount" + mount_dir.mkdir() + outside_dir = 
tmp_path / "outside" + outside_dir.mkdir() + victim = outside_dir / "victim.txt" + victim.write_text("original") + _symlink_to(outside_dir, mount_dir / "escape", target_is_directory=True) + + sandbox = LocalSandbox( + "test", + [ + PathMapping(container_path="/mnt/data", local_path=str(mount_dir), read_only=False), + ], + ) + + with pytest.raises(PermissionError) as exc_info: + sandbox.write_file("/mnt/data/escape/victim.txt", "changed") + + assert exc_info.value.errno == errno.EACCES + assert victim.read_text() == "original" + + def test_write_file_uses_matched_read_only_mount_for_symlink_target(self, tmp_path): + repo_dir = tmp_path / "repo" + repo_dir.mkdir() + writable_dir = repo_dir / "writable" + writable_dir.mkdir() + _symlink_to(writable_dir, repo_dir / "link-to-writable", target_is_directory=True) + + sandbox = LocalSandbox( + "test", + [ + PathMapping(container_path="/mnt/repo", local_path=str(repo_dir), read_only=True), + PathMapping(container_path="/mnt/repo/writable", local_path=str(writable_dir), read_only=False), + ], + ) + + with pytest.raises(OSError) as exc_info: + sandbox.write_file("/mnt/repo/link-to-writable/file.txt", "bypass") + + assert exc_info.value.errno == errno.EROFS + assert not (writable_dir / "file.txt").exists() + + def test_list_dir_does_not_follow_symlink_escape_from_mount(self, tmp_path): + mount_dir = tmp_path / "mount" + mount_dir.mkdir() + outside_dir = tmp_path / "outside" + outside_dir.mkdir() + (outside_dir / "secret.txt").write_text("secret") + _symlink_to(outside_dir, mount_dir / "escape", target_is_directory=True) + (mount_dir / "visible.txt").write_text("visible") + + sandbox = LocalSandbox( + "test", + [ + PathMapping(container_path="/mnt/data", local_path=str(mount_dir), read_only=False), + ], + ) + + entries = sandbox.list_dir("/mnt/data", max_depth=2) + + assert "/mnt/data/visible.txt" in entries + assert all("secret.txt" not in entry for entry in entries) + assert all("outside" not in entry for entry in entries) + + def test_list_dir_formats_internal_directory_symlink_like_directory(self, tmp_path): + mount_dir = tmp_path / "mount" + nested_dir = mount_dir / "nested" + linked_dir = nested_dir / "linked-dir" + linked_dir.mkdir(parents=True) + _symlink_to(linked_dir, mount_dir / "dir-link", target_is_directory=True) + + sandbox = LocalSandbox( + "test", + [ + PathMapping(container_path="/mnt/data", local_path=str(mount_dir), read_only=False), + ], + ) + + entries = sandbox.list_dir("/mnt/data", max_depth=1) + + assert "/mnt/data/nested/" in entries + assert "/mnt/data/nested/linked-dir/" in entries + assert "/mnt/data/dir-link" not in entries + + def test_write_file_blocks_symlink_into_nested_read_only_mount(self, tmp_path): + repo_dir = tmp_path / "repo" + repo_dir.mkdir() + protected_dir = repo_dir / "protected" + protected_dir.mkdir() + _symlink_to(protected_dir, repo_dir / "link-to-protected", target_is_directory=True) + + sandbox = LocalSandbox( + "test", + [ + PathMapping(container_path="/mnt/repo", local_path=str(repo_dir), read_only=False), + PathMapping(container_path="/mnt/repo/protected", local_path=str(protected_dir), read_only=True), + ], + ) + + with pytest.raises(OSError) as exc_info: + sandbox.write_file("/mnt/repo/link-to-protected/file.txt", "bypass") + + assert exc_info.value.errno == errno.EROFS + assert not (protected_dir / "file.txt").exists() + + def test_update_file_blocks_symlink_into_nested_read_only_mount(self, tmp_path): + repo_dir = tmp_path / "repo" + repo_dir.mkdir() + protected_dir = repo_dir / "protected" + 
protected_dir.mkdir() + existing = protected_dir / "file.txt" + existing.write_bytes(b"original") + _symlink_to(protected_dir, repo_dir / "link-to-protected", target_is_directory=True) + + sandbox = LocalSandbox( + "test", + [ + PathMapping(container_path="/mnt/repo", local_path=str(repo_dir), read_only=False), + PathMapping(container_path="/mnt/repo/protected", local_path=str(protected_dir), read_only=True), + ], + ) + + with pytest.raises(OSError) as exc_info: + sandbox.update_file("/mnt/repo/link-to-protected/file.txt", b"changed") + + assert exc_info.value.errno == errno.EROFS + assert existing.read_bytes() == b"original" + + class TestMultipleMounts: def test_multiple_read_write_mounts(self, tmp_path): skills_dir = tmp_path / "skills" @@ -255,7 +414,9 @@ class TestMultipleMounts: sandbox.execute_command("cat /mnt/data/test.txt") # Verify the command received the resolved local path - assert str(data_dir) in captured.get("command", "") + command = captured.get("command", []) + assert isinstance(command, list) and len(command) >= 3 + assert str(data_dir) in command[2] def test_reverse_resolve_path_does_not_match_partial_prefix(self, tmp_path): foo_dir = tmp_path / "foo" @@ -308,7 +469,7 @@ class TestLocalSandboxProviderMounts: ], ) config = SimpleNamespace( - skills=SimpleNamespace(container_path="/custom-skills", get_skills_path=lambda: skills_dir), + skills=SimpleNamespace(container_path="/custom-skills", get_skills_path=lambda: skills_dir, use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"), sandbox=sandbox_config, ) @@ -330,7 +491,7 @@ class TestLocalSandboxProviderMounts: ], ) config = SimpleNamespace( - skills=SimpleNamespace(container_path="/mnt/skills", get_skills_path=lambda: skills_dir), + skills=SimpleNamespace(container_path="/mnt/skills", get_skills_path=lambda: skills_dir, use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"), sandbox=sandbox_config, ) @@ -354,7 +515,7 @@ class TestLocalSandboxProviderMounts: ], ) config = SimpleNamespace( - skills=SimpleNamespace(container_path="/mnt/skills", get_skills_path=lambda: skills_dir), + skills=SimpleNamespace(container_path="/mnt/skills", get_skills_path=lambda: skills_dir, use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"), sandbox=sandbox_config, ) @@ -470,7 +631,7 @@ class TestLocalSandboxProviderMounts: ], ) config = SimpleNamespace( - skills=SimpleNamespace(container_path="/mnt/skills", get_skills_path=lambda: skills_dir), + skills=SimpleNamespace(container_path="/mnt/skills", get_skills_path=lambda: skills_dir, use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"), sandbox=sandbox_config, ) diff --git a/backend/tests/test_local_skill_storage_write.py b/backend/tests/test_local_skill_storage_write.py new file mode 100644 index 000000000..ce68c6e88 --- /dev/null +++ b/backend/tests/test_local_skill_storage_write.py @@ -0,0 +1,162 @@ +"""Tests for LocalSkillStorage.write_custom_skill path-traversal guards.""" + +from __future__ import annotations + +import os + +import pytest + +from deerflow.skills.storage import get_or_new_skill_storage + + +@pytest.fixture() +def storage(tmp_path): + return get_or_new_skill_storage(skills_path=str(tmp_path)) + + +@pytest.fixture() +def skill_dir(tmp_path, storage): + """Pre-create the skill directory so symlink tests can plant files inside.""" + d = tmp_path / "custom" / "demo-skill" + d.mkdir(parents=True, exist_ok=True) + return d + + +# --------------------------------------------------------------------------- +# Happy 
path +# --------------------------------------------------------------------------- + + +def test_write_creates_file(tmp_path, storage): + storage.write_custom_skill("demo-skill", "SKILL.md", "# hello") + assert (tmp_path / "custom" / "demo-skill" / "SKILL.md").read_text() == "# hello" + + +def test_write_creates_subdirectory(tmp_path, storage): + storage.write_custom_skill("demo-skill", "references/ref.md", "# ref") + assert (tmp_path / "custom" / "demo-skill" / "references" / "ref.md").exists() + + +def test_write_is_atomic_overwrite(tmp_path, storage): + storage.write_custom_skill("demo-skill", "SKILL.md", "first") + storage.write_custom_skill("demo-skill", "SKILL.md", "second") + assert (tmp_path / "custom" / "demo-skill" / "SKILL.md").read_text() == "second" + + +# --------------------------------------------------------------------------- +# Empty / blank path +# --------------------------------------------------------------------------- + + +def test_rejects_empty_string(storage): + with pytest.raises(ValueError, match="empty"): + storage.write_custom_skill("demo-skill", "", "x") + + +# --------------------------------------------------------------------------- +# Absolute paths +# --------------------------------------------------------------------------- + + +def test_rejects_absolute_unix_path(storage): + with pytest.raises(ValueError, match="skill directory"): + storage.write_custom_skill("demo-skill", "/etc/passwd", "x") + + +def test_rejects_absolute_path_with_skill_prefix(tmp_path, storage): + """Absolute path within skill dir: containment check passes (not a security issue). + + Python's Path(base) / "/abs/path" ignores base and returns /abs/path directly. + If that absolute path resolves within skill_dir, the write succeeds. + This is not an escape — the file lands in the correct location. 
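+    For example, Path("/base") / "/etc/passwd" evaluates to Path("/etc/passwd"):
+    pathlib treats a right-hand absolute path as a new anchor.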
+ """ + absolute = str(tmp_path / "custom" / "demo-skill" / "SKILL.md") + # Does not raise; the write goes to the expected place + storage.write_custom_skill("demo-skill", absolute, "# ok") + assert (tmp_path / "custom" / "demo-skill" / "SKILL.md").read_text() == "# ok" + + +# --------------------------------------------------------------------------- +# Parent-directory traversal +# --------------------------------------------------------------------------- + + +def test_rejects_dotdot_escape(storage): + with pytest.raises(ValueError, match="skill directory"): + storage.write_custom_skill("demo-skill", "../../escaped.txt", "x") + + +def test_rejects_dotdot_sibling(storage): + with pytest.raises(ValueError, match="skill directory"): + storage.write_custom_skill("demo-skill", "../sibling/x.txt", "x") + + +def test_rejects_dotdot_in_subpath(storage): + with pytest.raises(ValueError, match="skill directory"): + storage.write_custom_skill("demo-skill", "sub/../../escape.txt", "x") + + +def test_rejects_dotdot_only(storage): + with pytest.raises(ValueError, match="skill directory"): + storage.write_custom_skill("demo-skill", "..", "x") + + +# --------------------------------------------------------------------------- +# Symlink escape +# --------------------------------------------------------------------------- + + +def test_rejects_symlink_pointing_outside(tmp_path, storage, skill_dir): + outside = tmp_path / "outside.txt" + link = skill_dir / "escape_link.txt" + os.symlink(outside, link) + with pytest.raises(ValueError, match="skill directory"): + storage.write_custom_skill("demo-skill", "escape_link.txt", "x") + + +def test_rejects_symlink_dir_pointing_outside(tmp_path, storage, skill_dir): + outside_dir = tmp_path / "outside_dir" + outside_dir.mkdir() + link_dir = skill_dir / "linked_dir" + os.symlink(outside_dir, link_dir) + with pytest.raises(ValueError, match="skill directory"): + storage.write_custom_skill("demo-skill", "linked_dir/file.txt", "x") + + +def test_allows_symlink_within_skill_dir(tmp_path, storage, skill_dir): + """A symlink that resolves inside the skill directory is allowed. + + Because target is resolved before writing, the write goes to the real file + the symlink points to (both the link and the real file end up with the new + content). 
+ """ + real_file = skill_dir / "real.md" + real_file.write_text("real") + link = skill_dir / "alias.md" + os.symlink(real_file, link) + # Should not raise + storage.write_custom_skill("demo-skill", "alias.md", "updated") + # resolve() writes through to the real target file + assert real_file.read_text() == "updated" + assert (skill_dir / "alias.md").read_text() == "updated" + + +# --------------------------------------------------------------------------- +# Invalid skill-name traversal +# --------------------------------------------------------------------------- + + +@pytest.mark.parametrize( + "name,method_name", + [ + ("../../escaped", "get_custom_skill_dir"), + ("../../escaped", "get_custom_skill_file"), + ("../../escaped", "get_skill_history_file"), + ("../../escaped", "custom_skill_exists"), + ("../../escaped", "public_skill_exists"), + ], +) +def test_rejects_invalid_skill_name_in_path_helpers(storage, name, method_name): + method = getattr(storage, method_name) + with pytest.raises(ValueError, match="hyphen-case"): + method(name) diff --git a/backend/tests/test_logging_level_from_config.py b/backend/tests/test_logging_level_from_config.py new file mode 100644 index 000000000..c78c0c427 --- /dev/null +++ b/backend/tests/test_logging_level_from_config.py @@ -0,0 +1,91 @@ +"""Tests for ``logging_level_from_config`` and ``apply_logging_level`` (``config.yaml`` ``log_level`` mapping).""" + +import logging + +import pytest + +from deerflow.config.app_config import apply_logging_level, logging_level_from_config + + +@pytest.mark.parametrize( + ("name", "expected"), + [ + ("debug", logging.DEBUG), + ("INFO", logging.INFO), + ("warning", logging.WARNING), + ("error", logging.ERROR), + ("critical", logging.CRITICAL), + (" Debug ", logging.DEBUG), + (None, logging.INFO), + ("", logging.INFO), + ], +) +def test_logging_level_from_config_known_and_defaults(name: str | None, expected: int) -> None: + assert logging_level_from_config(name) == expected + + +def test_logging_level_from_config_unknown_falls_back_to_info() -> None: + assert logging_level_from_config("not-a-real-level-name") == logging.INFO + + +class TestApplyLoggingLevel: + """Tests for ``apply_logging_level`` — verifies deerflow/app logger and handler levels.""" + + def setup_method(self) -> None: + root = logging.root + self._original_root_level = root.level + self._original_root_handlers = list(root.handlers) + self._original_handler_levels = {handler: handler.level for handler in self._original_root_handlers} + self._original_deerflow_level = logging.getLogger("deerflow").level + self._original_app_level = logging.getLogger("app").level + + def teardown_method(self) -> None: + root = logging.root + current_handlers = list(root.handlers) + + for handler in current_handlers: + if handler not in self._original_root_handlers: + root.removeHandler(handler) + handler.close() + + for handler in list(root.handlers): + root.removeHandler(handler) + + for handler in self._original_root_handlers: + handler.setLevel(self._original_handler_levels[handler]) + root.addHandler(handler) + + root.setLevel(self._original_root_level) + logging.getLogger("deerflow").setLevel(self._original_deerflow_level) + logging.getLogger("app").setLevel(self._original_app_level) + + def test_sets_deerflow_app_logger_levels(self) -> None: + apply_logging_level("debug") + assert logging.getLogger("deerflow").level == logging.DEBUG + assert logging.getLogger("app").level == logging.DEBUG + + def test_lowers_handler_level(self) -> None: + handler = 
logging.StreamHandler() + handler.setLevel(logging.WARNING) + logging.root.addHandler(handler) + apply_logging_level("debug") + assert handler.level == logging.DEBUG + + def test_does_not_raise_handler_level(self) -> None: + handler = logging.StreamHandler() + handler.setLevel(logging.WARNING) + logging.root.addHandler(handler) + apply_logging_level("error") + assert handler.level == logging.WARNING + + def test_does_not_modify_root_logger(self) -> None: + logging.root.setLevel(logging.WARNING) + apply_logging_level("debug") + assert logging.root.level == logging.WARNING + apply_logging_level("error") + assert logging.root.level == logging.WARNING + + def test_defaults_to_info(self) -> None: + apply_logging_level(None) + assert logging.getLogger("deerflow").level == logging.INFO + assert logging.getLogger("app").level == logging.INFO diff --git a/backend/tests/test_mcp_custom_interceptors.py b/backend/tests/test_mcp_custom_interceptors.py new file mode 100644 index 000000000..08432de98 --- /dev/null +++ b/backend/tests/test_mcp_custom_interceptors.py @@ -0,0 +1,274 @@ +"""Tests for custom MCP tool interceptors loaded via extensions_config.json.""" + +import asyncio +from unittest.mock import AsyncMock, MagicMock, patch + +from deerflow.mcp.tools import get_mcp_tools + + +def _make_patches(*, interceptor_paths=None): + """Set up mocks for get_mcp_tools() with optional custom interceptors. + + Returns a dict of patch context managers. + """ + mock_client = MagicMock() + mock_client.get_tools = AsyncMock(return_value=[]) + + extra = {} + if interceptor_paths is not None: + extra["mcpInterceptors"] = interceptor_paths + + return { + "client_cls": patch( + "langchain_mcp_adapters.client.MultiServerMCPClient", + return_value=mock_client, + ), + "from_file": patch( + "deerflow.config.extensions_config.ExtensionsConfig.from_file", + return_value=MagicMock( + model_extra=extra, + get_enabled_mcp_servers=MagicMock(return_value={}), + ), + ), + "build_servers": patch( + "deerflow.mcp.tools.build_servers_config", + return_value={"test-server": {}}, + ), + "oauth_headers": patch( + "deerflow.mcp.tools.get_initial_oauth_headers", + new_callable=AsyncMock, + return_value={}, + ), + "oauth_interceptor": patch( + "deerflow.mcp.tools.build_oauth_tool_interceptor", + return_value=None, + ), + } + + +def _get_interceptors(mock_cls): + """Extract the tool_interceptors list passed to MultiServerMCPClient.""" + kw = mock_cls.call_args + return kw.kwargs.get("tool_interceptors") or kw[1].get("tool_interceptors", []) + + +def test_custom_interceptor_loaded_and_appended(): + """A valid interceptor builder path is resolved, called, and appended to tool_interceptors.""" + + async def fake_interceptor(request, handler): + return await handler(request) + + def fake_builder(): + return fake_interceptor + + p = _make_patches(interceptor_paths=["my_package.auth:build_interceptor"]) + + with ( + p["client_cls"] as mock_cls, + p["from_file"], + p["build_servers"], + p["oauth_headers"], + p["oauth_interceptor"], + patch("deerflow.mcp.tools.resolve_variable", return_value=fake_builder), + ): + asyncio.run(get_mcp_tools()) + + interceptors = _get_interceptors(mock_cls) + assert len(interceptors) == 1 + assert interceptors[0] is fake_interceptor + + +def test_multiple_custom_interceptors(): + """Multiple interceptor paths are all loaded in order.""" + + async def interceptor_a(request, handler): + return await handler(request) + + async def interceptor_b(request, handler): + return await handler(request) + + builders = { + 
"pkg.a:build_a": lambda: interceptor_a, + "pkg.b:build_b": lambda: interceptor_b, + } + + p = _make_patches(interceptor_paths=["pkg.a:build_a", "pkg.b:build_b"]) + + with ( + p["client_cls"] as mock_cls, + p["from_file"], + p["build_servers"], + p["oauth_headers"], + p["oauth_interceptor"], + patch("deerflow.mcp.tools.resolve_variable", side_effect=lambda path: builders[path]), + ): + asyncio.run(get_mcp_tools()) + + interceptors = _get_interceptors(mock_cls) + assert len(interceptors) == 2 + assert interceptors[0] is interceptor_a + assert interceptors[1] is interceptor_b + + +def test_custom_interceptor_builder_returning_none_is_skipped(): + """If a builder returns None, it is not appended to the interceptor list.""" + p = _make_patches(interceptor_paths=["pkg.noop:build_noop"]) + + with ( + p["client_cls"] as mock_cls, + p["from_file"], + p["build_servers"], + p["oauth_headers"], + p["oauth_interceptor"], + patch("deerflow.mcp.tools.resolve_variable", return_value=lambda: None), + ): + asyncio.run(get_mcp_tools()) + + assert len(_get_interceptors(mock_cls)) == 0 + + +def test_custom_interceptor_resolve_error_logs_warning_and_continues(): + """A broken interceptor path logs a warning and does not block tool loading.""" + p = _make_patches(interceptor_paths=["broken.path:does_not_exist"]) + + with ( + p["client_cls"], + p["from_file"], + p["build_servers"], + p["oauth_headers"], + p["oauth_interceptor"], + patch("deerflow.mcp.tools.resolve_variable", side_effect=ImportError("no such module")), + patch("deerflow.mcp.tools.logger.warning") as mock_warn, + ): + tools = asyncio.run(get_mcp_tools()) + + assert tools == [] + mock_warn.assert_called_once() + assert "broken.path:does_not_exist" in mock_warn.call_args[0][0] + + +def test_custom_interceptor_builder_exception_logs_warning_and_continues(): + """If the builder function itself raises, the error is caught and logged.""" + + def exploding_builder(): + raise RuntimeError("builder exploded") + + p = _make_patches(interceptor_paths=["pkg.bad:exploding_builder"]) + + with ( + p["client_cls"], + p["from_file"], + p["build_servers"], + p["oauth_headers"], + p["oauth_interceptor"], + patch("deerflow.mcp.tools.resolve_variable", return_value=exploding_builder), + patch("deerflow.mcp.tools.logger.warning") as mock_warn, + ): + tools = asyncio.run(get_mcp_tools()) + + assert tools == [] + mock_warn.assert_called_once() + assert "pkg.bad:exploding_builder" in mock_warn.call_args[0][0] + + +def test_no_mcp_interceptors_field_is_safe(): + """When mcpInterceptors is absent from config, no interceptors are added.""" + p = _make_patches(interceptor_paths=None) + + with ( + p["client_cls"] as mock_cls, + p["from_file"], + p["build_servers"], + p["oauth_headers"], + p["oauth_interceptor"], + ): + asyncio.run(get_mcp_tools()) + + assert len(_get_interceptors(mock_cls)) == 0 + + +def test_custom_interceptor_coexists_with_oauth_interceptor(): + """Custom interceptors are appended after the OAuth interceptor.""" + + async def oauth_fn(request, handler): + return await handler(request) + + async def custom_fn(request, handler): + return await handler(request) + + p = _make_patches(interceptor_paths=["pkg.custom:build_custom"]) + + with ( + p["client_cls"] as mock_cls, + p["from_file"], + p["build_servers"], + p["oauth_headers"], + patch("deerflow.mcp.tools.build_oauth_tool_interceptor", return_value=oauth_fn), + patch("deerflow.mcp.tools.resolve_variable", return_value=lambda: custom_fn), + ): + asyncio.run(get_mcp_tools()) + + interceptors = 
_get_interceptors(mock_cls) + assert len(interceptors) == 2 + assert interceptors[0] is oauth_fn + assert interceptors[1] is custom_fn + + +def test_mcp_interceptors_single_string_is_normalized(): + """A single string value for mcpInterceptors is normalized to a list.""" + + async def fake_interceptor(request, handler): + return await handler(request) + + p = _make_patches(interceptor_paths="pkg.single:build_it") + + with ( + p["client_cls"] as mock_cls, + p["from_file"], + p["build_servers"], + p["oauth_headers"], + p["oauth_interceptor"], + patch("deerflow.mcp.tools.resolve_variable", return_value=lambda: fake_interceptor), + ): + asyncio.run(get_mcp_tools()) + + assert len(_get_interceptors(mock_cls)) == 1 + + +def test_mcp_interceptors_invalid_type_logs_warning(): + """A non-list, non-string value for mcpInterceptors logs a warning and is skipped.""" + p = _make_patches(interceptor_paths=42) + + with ( + p["client_cls"] as mock_cls, + p["from_file"], + p["build_servers"], + p["oauth_headers"], + p["oauth_interceptor"], + patch("deerflow.mcp.tools.logger.warning") as mock_warn, + ): + asyncio.run(get_mcp_tools()) + + assert len(_get_interceptors(mock_cls)) == 0 + mock_warn.assert_called_once() + assert "must be a list" in mock_warn.call_args[0][0] + + +def test_custom_interceptor_non_callable_return_logs_warning(): + """If a builder returns a non-callable value, it is skipped with a warning.""" + p = _make_patches(interceptor_paths=["pkg.bad:returns_string"]) + + with ( + p["client_cls"] as mock_cls, + p["from_file"], + p["build_servers"], + p["oauth_headers"], + p["oauth_interceptor"], + patch("deerflow.mcp.tools.resolve_variable", return_value=lambda: "not_a_callable"), + patch("deerflow.mcp.tools.logger.warning") as mock_warn, + ): + asyncio.run(get_mcp_tools()) + + assert len(_get_interceptors(mock_cls)) == 0 + mock_warn.assert_called_once() + assert "non-callable" in mock_warn.call_args[0][0] diff --git a/backend/tests/test_memory_queue.py b/backend/tests/test_memory_queue.py index 0d991ec0c..27808b0e8 100644 --- a/backend/tests/test_memory_queue.py +++ b/backend/tests/test_memory_queue.py @@ -50,6 +50,7 @@ def test_process_queue_forwards_correction_flag_to_updater() -> None: agent_name="lead_agent", correction_detected=True, reinforcement_detected=False, + user_id=None, ) @@ -90,6 +91,7 @@ def test_process_queue_forwards_reinforcement_flag_to_updater() -> None: agent_name="lead_agent", correction_detected=False, reinforcement_detected=True, + user_id=None, ) diff --git a/backend/tests/test_memory_queue_user_isolation.py b/backend/tests/test_memory_queue_user_isolation.py new file mode 100644 index 000000000..cf068e095 --- /dev/null +++ b/backend/tests/test_memory_queue_user_isolation.py @@ -0,0 +1,39 @@ +"""Tests for user_id propagation through memory queue.""" + +from unittest.mock import MagicMock, patch + +from deerflow.agents.memory.queue import ConversationContext, MemoryUpdateQueue + + +def test_conversation_context_has_user_id(): + ctx = ConversationContext(thread_id="t1", messages=[], user_id="alice") + assert ctx.user_id == "alice" + + +def test_conversation_context_user_id_default_none(): + ctx = ConversationContext(thread_id="t1", messages=[]) + assert ctx.user_id is None + + +def test_queue_add_stores_user_id(): + q = MemoryUpdateQueue() + with patch.object(q, "_reset_timer"): + q.add(thread_id="t1", messages=["msg"], user_id="alice") + assert len(q._queue) == 1 + assert q._queue[0].user_id == "alice" + q.clear() + + +def test_queue_process_passes_user_id_to_updater(): 
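+    """user_id captured by add() must be forwarded to MemoryUpdater.update_memory()."""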
+ q = MemoryUpdateQueue() + with patch.object(q, "_reset_timer"): + q.add(thread_id="t1", messages=["msg"], user_id="alice") + + mock_updater = MagicMock() + mock_updater.update_memory.return_value = True + with patch("deerflow.agents.memory.updater.MemoryUpdater", return_value=mock_updater): + q._process_queue() + + mock_updater.update_memory.assert_called_once() + call_kwargs = mock_updater.update_memory.call_args.kwargs + assert call_kwargs["user_id"] == "alice" diff --git a/backend/tests/test_memory_router.py b/backend/tests/test_memory_router.py index 23a4f30fe..91fd1d662 100644 --- a/backend/tests/test_memory_router.py +++ b/backend/tests/test_memory_router.py @@ -258,12 +258,13 @@ def test_update_memory_fact_route_preserves_omitted_fields() -> None: ) assert response.status_code == 200 - update_fact.assert_called_once_with( - fact_id="fact_edit", - content="User prefers spaces", - category=None, - confidence=None, - ) + assert update_fact.call_count == 1 + call_kwargs = update_fact.call_args.kwargs + assert call_kwargs.get("fact_id") == "fact_edit" + assert call_kwargs.get("content") == "User prefers spaces" + assert call_kwargs.get("category") is None + assert call_kwargs.get("confidence") is None + assert "user_id" in call_kwargs assert response.json()["facts"] == updated_memory["facts"] diff --git a/backend/tests/test_memory_storage_user_isolation.py b/backend/tests/test_memory_storage_user_isolation.py new file mode 100644 index 000000000..5dd114b7e --- /dev/null +++ b/backend/tests/test_memory_storage_user_isolation.py @@ -0,0 +1,152 @@ +"""Tests for per-user memory storage isolation.""" + +from pathlib import Path +from unittest.mock import patch + +import pytest + +from deerflow.agents.memory.storage import FileMemoryStorage, create_empty_memory + + +@pytest.fixture +def base_dir(tmp_path: Path) -> Path: + return tmp_path + + +@pytest.fixture +def storage() -> FileMemoryStorage: + return FileMemoryStorage() + + +class TestUserIsolatedStorage: + def test_save_and_load_per_user(self, storage: FileMemoryStorage, base_dir: Path): + from deerflow.config.paths import Paths + + paths = Paths(base_dir) + with patch("deerflow.agents.memory.storage.get_paths", return_value=paths): + memory_a = create_empty_memory() + memory_a["user"]["workContext"]["summary"] = "User A context" + storage.save(memory_a, user_id="alice") + + memory_b = create_empty_memory() + memory_b["user"]["workContext"]["summary"] = "User B context" + storage.save(memory_b, user_id="bob") + + loaded_a = storage.load(user_id="alice") + loaded_b = storage.load(user_id="bob") + + assert loaded_a["user"]["workContext"]["summary"] == "User A context" + assert loaded_b["user"]["workContext"]["summary"] == "User B context" + + def test_user_memory_file_location(self, base_dir: Path): + from deerflow.config.paths import Paths + + paths = Paths(base_dir) + with patch("deerflow.agents.memory.storage.get_paths", return_value=paths): + s = FileMemoryStorage() + memory = create_empty_memory() + s.save(memory, user_id="alice") + expected_path = base_dir / "users" / "alice" / "memory.json" + assert expected_path.exists() + + def test_cache_isolated_per_user(self, base_dir: Path): + from deerflow.config.paths import Paths + + paths = Paths(base_dir) + with patch("deerflow.agents.memory.storage.get_paths", return_value=paths): + s = FileMemoryStorage() + memory_a = create_empty_memory() + memory_a["user"]["workContext"]["summary"] = "A" + s.save(memory_a, user_id="alice") + + memory_b = create_empty_memory() + 
memory_b["user"]["workContext"]["summary"] = "B" + s.save(memory_b, user_id="bob") + + loaded_a = s.load(user_id="alice") + assert loaded_a["user"]["workContext"]["summary"] == "A" + + def test_no_user_id_uses_legacy_path(self, base_dir: Path): + from deerflow.config.memory_config import MemoryConfig + from deerflow.config.paths import Paths + + paths = Paths(base_dir) + with patch("deerflow.agents.memory.storage.get_paths", return_value=paths): + with patch("deerflow.agents.memory.storage.get_memory_config", return_value=MemoryConfig(storage_path="")): + s = FileMemoryStorage() + memory = create_empty_memory() + s.save(memory, user_id=None) + expected_path = base_dir / "memory.json" + assert expected_path.exists() + + def test_user_and_legacy_do_not_interfere(self, base_dir: Path): + """user_id=None (legacy) and user_id='alice' must use different files and caches.""" + from deerflow.config.memory_config import MemoryConfig + from deerflow.config.paths import Paths + + paths = Paths(base_dir) + with patch("deerflow.agents.memory.storage.get_paths", return_value=paths): + with patch("deerflow.agents.memory.storage.get_memory_config", return_value=MemoryConfig(storage_path="")): + s = FileMemoryStorage() + + legacy_mem = create_empty_memory() + legacy_mem["user"]["workContext"]["summary"] = "legacy" + s.save(legacy_mem, user_id=None) + + user_mem = create_empty_memory() + user_mem["user"]["workContext"]["summary"] = "alice" + s.save(user_mem, user_id="alice") + + assert s.load(user_id=None)["user"]["workContext"]["summary"] == "legacy" + assert s.load(user_id="alice")["user"]["workContext"]["summary"] == "alice" + + def test_user_agent_memory_file_location(self, base_dir: Path): + """Per-user per-agent memory uses the user_agent_memory_file path.""" + from deerflow.config.paths import Paths + + paths = Paths(base_dir) + with patch("deerflow.agents.memory.storage.get_paths", return_value=paths): + s = FileMemoryStorage() + memory = create_empty_memory() + memory["user"]["workContext"]["summary"] = "agent scoped" + s.save(memory, "test-agent", user_id="alice") + expected_path = base_dir / "users" / "alice" / "agents" / "test-agent" / "memory.json" + assert expected_path.exists() + + def test_cache_key_is_user_agent_tuple(self, base_dir: Path): + """Cache keys must be (user_id, agent_name) tuples, not bare agent names.""" + from deerflow.config.paths import Paths + + paths = Paths(base_dir) + with patch("deerflow.agents.memory.storage.get_paths", return_value=paths): + s = FileMemoryStorage() + memory = create_empty_memory() + s.save(memory, user_id="alice") + # After save, cache should have tuple key + assert ("alice", None) in s._memory_cache + + def test_reload_with_user_id(self, base_dir: Path): + """reload() with user_id should force re-read from the user-scoped file.""" + from deerflow.config.paths import Paths + + paths = Paths(base_dir) + with patch("deerflow.agents.memory.storage.get_paths", return_value=paths): + s = FileMemoryStorage() + memory = create_empty_memory() + memory["user"]["workContext"]["summary"] = "initial" + s.save(memory, user_id="alice") + + # Load once to prime cache + s.load(user_id="alice") + + # Write updated content directly to file + user_file = base_dir / "users" / "alice" / "memory.json" + import json + + updated = create_empty_memory() + updated["user"]["workContext"]["summary"] = "updated" + user_file.write_text(json.dumps(updated)) + + # reload should pick up the new content + reloaded = s.reload(user_id="alice") + assert 
reloaded["user"]["workContext"]["summary"] == "updated" diff --git a/backend/tests/test_memory_thread_meta_isolation.py b/backend/tests/test_memory_thread_meta_isolation.py new file mode 100644 index 000000000..25c9298f0 --- /dev/null +++ b/backend/tests/test_memory_thread_meta_isolation.py @@ -0,0 +1,156 @@ +"""Owner isolation tests for MemoryThreadMetaStore. + +Mirrors the SQL-backed tests in test_owner_isolation.py but exercises +the in-memory LangGraph Store backend used when database.backend=memory. +""" + +from __future__ import annotations + +from types import SimpleNamespace + +import pytest +from langgraph.store.memory import InMemoryStore + +from deerflow.persistence.thread_meta.memory import MemoryThreadMetaStore +from deerflow.runtime.user_context import reset_current_user, set_current_user + +USER_A = SimpleNamespace(id="user-a", email="a@test.local") +USER_B = SimpleNamespace(id="user-b", email="b@test.local") + + +def _as_user(user): + class _Ctx: + def __enter__(self): + self._token = set_current_user(user) + return user + + def __exit__(self, *exc): + reset_current_user(self._token) + + return _Ctx() + + +@pytest.fixture +def store(): + return MemoryThreadMetaStore(InMemoryStore()) + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_search_isolation(store): + """search() returns only threads owned by the current user.""" + with _as_user(USER_A): + await store.create("t-alpha", display_name="A's thread") + with _as_user(USER_B): + await store.create("t-beta", display_name="B's thread") + + with _as_user(USER_A): + results = await store.search() + assert [r["thread_id"] for r in results] == ["t-alpha"] + + with _as_user(USER_B): + results = await store.search() + assert [r["thread_id"] for r in results] == ["t-beta"] + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_get_isolation(store): + """get() returns None for threads owned by another user.""" + with _as_user(USER_A): + await store.create("t-alpha", display_name="A's thread") + + with _as_user(USER_B): + assert await store.get("t-alpha") is None + + with _as_user(USER_A): + result = await store.get("t-alpha") + assert result is not None + assert result["display_name"] == "A's thread" + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_update_display_name_denied(store): + """User B cannot rename User A's thread.""" + with _as_user(USER_A): + await store.create("t-alpha", display_name="original") + + with _as_user(USER_B): + await store.update_display_name("t-alpha", "hacked") + + with _as_user(USER_A): + row = await store.get("t-alpha") + assert row is not None + assert row["display_name"] == "original" + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_update_status_denied(store): + """User B cannot change status of User A's thread.""" + with _as_user(USER_A): + await store.create("t-alpha") + + with _as_user(USER_B): + await store.update_status("t-alpha", "error") + + with _as_user(USER_A): + row = await store.get("t-alpha") + assert row is not None + assert row["status"] == "idle" + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_update_metadata_denied(store): + """User B cannot modify metadata of User A's thread.""" + with _as_user(USER_A): + await store.create("t-alpha", metadata={"key": "original"}) + + with _as_user(USER_B): + await store.update_metadata("t-alpha", {"key": "hacked"}) + + with _as_user(USER_A): + row = await store.get("t-alpha") + assert row is not None + assert row["metadata"]["key"] == "original" + + 
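+# The remaining cases cover the destructive path plus the two edge modes of the
+# user-context contract: no context at all (hard error) and an explicit
+# user_id=None (the deliberate bypass used by migrations and CLI tooling).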
+@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_delete_denied(store): + """User B cannot delete User A's thread.""" + with _as_user(USER_A): + await store.create("t-alpha") + + with _as_user(USER_B): + await store.delete("t-alpha") + + with _as_user(USER_A): + row = await store.get("t-alpha") + assert row is not None + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_no_context_raises(store): + """Calling methods without user context raises RuntimeError.""" + with pytest.raises(RuntimeError, match="no user context is set"): + await store.search() + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_explicit_none_bypasses_filter(store): + """user_id=None bypasses isolation (migration/CLI escape hatch).""" + with _as_user(USER_A): + await store.create("t-alpha") + with _as_user(USER_B): + await store.create("t-beta") + + all_rows = await store.search(user_id=None) + assert {r["thread_id"] for r in all_rows} == {"t-alpha", "t-beta"} + + row = await store.get("t-alpha", user_id=None) + assert row is not None diff --git a/backend/tests/test_memory_updater.py b/backend/tests/test_memory_updater.py index fce8cd0fb..03d135564 100644 --- a/backend/tests/test_memory_updater.py +++ b/backend/tests/test_memory_updater.py @@ -1,13 +1,10 @@ import asyncio from unittest.mock import AsyncMock, MagicMock, patch -import pytest - from deerflow.agents.memory.prompt import format_conversation_for_update from deerflow.agents.memory.updater import ( MemoryUpdater, _extract_text, - _run_async_update_sync, clear_memory_data, create_memory_fact, delete_memory_fact, @@ -305,8 +302,8 @@ def test_import_memory_data_saves_and_returns_imported_memory() -> None: with patch("deerflow.agents.memory.updater.get_memory_storage", return_value=mock_storage): result = import_memory_data(imported_memory) - mock_storage.save.assert_called_once_with(imported_memory, None) - mock_storage.load.assert_called_once_with(None) + mock_storage.save.assert_called_once_with(imported_memory, None, user_id=None) + mock_storage.load.assert_called_once_with(None, user_id=None) assert result == imported_memory @@ -528,6 +525,7 @@ class TestUpdateMemoryStructuredResponse: response = MagicMock() response.content = content model.ainvoke = AsyncMock(return_value=response) + model.invoke = MagicMock(return_value=response) return model def test_string_response_parses(self): @@ -551,7 +549,7 @@ class TestUpdateMemoryStructuredResponse: result = updater.update_memory([msg, ai_msg]) assert result is True - model.ainvoke.assert_awaited_once() + model.invoke.assert_called_once() def test_list_content_response_parses(self): """LLM response as list-of-blocks should be extracted, not repr'd.""" @@ -576,7 +574,8 @@ class TestUpdateMemoryStructuredResponse: assert result is True - def test_async_update_memory_uses_ainvoke(self): + def test_async_update_memory_delegates_to_sync(self): + """aupdate_memory should delegate to sync _do_update_memory_sync via to_thread.""" updater = MemoryUpdater() valid_json = '{"user": {}, "history": {}, "newFacts": [], "factsToRemove": []}' model = self._make_mock_model(valid_json) @@ -597,7 +596,9 @@ class TestUpdateMemoryStructuredResponse: result = asyncio.run(updater.aupdate_memory([msg, ai_msg])) assert result is True - model.ainvoke.assert_awaited_once() + # aupdate_memory delegates to sync path — model.invoke, not ainvoke + model.invoke.assert_called_once() + model.ainvoke.assert_not_called() def test_correction_hint_injected_when_detected(self): updater = MemoryUpdater() @@ 
-621,7 +622,7 @@ class TestUpdateMemoryStructuredResponse: result = updater.update_memory([msg, ai_msg], correction_detected=True) assert result is True - prompt = model.ainvoke.await_args.args[0] + prompt = model.invoke.call_args.args[0] assert "Explicit correction signals were detected" in prompt def test_correction_hint_empty_when_not_detected(self): @@ -646,7 +647,7 @@ class TestUpdateMemoryStructuredResponse: result = updater.update_memory([msg, ai_msg], correction_detected=False) assert result is True - prompt = model.ainvoke.await_args.args[0] + prompt = model.invoke.call_args.args[0] assert "Explicit correction signals were detected" not in prompt def test_sync_update_memory_wrapper_works_in_running_loop(self): @@ -674,9 +675,9 @@ class TestUpdateMemoryStructuredResponse: result = asyncio.run(run_in_loop()) assert result is True - model.ainvoke.assert_awaited_once() + model.invoke.assert_called_once() - def test_sync_update_memory_returns_false_when_bridge_submit_fails(self): + def test_sync_update_memory_returns_false_when_executor_down(self): updater = MemoryUpdater() with ( @@ -701,33 +702,67 @@ class TestUpdateMemoryStructuredResponse: assert result is False -class TestRunAsyncUpdateSync: - def test_closes_unawaited_awaitable_when_bridge_fails_before_handoff(self): - class CloseableAwaitable: - def __init__(self): - self.closed = False +class TestSyncUpdateIsolatesProviderClientPool: + """Regression tests for issue #2615. - def __await__(self): - pytest.fail("awaitable should not have been awaited") - yield + The sync ``update_memory`` path must use ``model.invoke()`` (sync HTTP) + and never touch the async provider client pool shared with the lead agent. + """ - def close(self): - self.closed = True + def test_sync_update_uses_invoke_not_ainvoke(self): + updater = MemoryUpdater() + valid_json = '{"user": {}, "history": {}, "newFacts": [], "factsToRemove": []}' + model = MagicMock() + response = MagicMock() + response.content = valid_json + model.invoke = MagicMock(return_value=response) + model.ainvoke = AsyncMock(return_value=response) - awaitable = CloseableAwaitable() - - with patch( - "deerflow.agents.memory.updater._SYNC_MEMORY_UPDATER_EXECUTOR.submit", - side_effect=RuntimeError("executor down"), + with ( + patch.object(updater, "_get_model", return_value=model), + patch("deerflow.agents.memory.updater.get_memory_config", return_value=_memory_config(enabled=True)), + patch("deerflow.agents.memory.updater.get_memory_data", return_value=_make_memory()), + patch("deerflow.agents.memory.updater.get_memory_storage", return_value=MagicMock(save=MagicMock(return_value=True))), ): + msg = MagicMock() + msg.type = "human" + msg.content = "Hello" + ai_msg = MagicMock() + ai_msg.type = "ai" + ai_msg.content = "Hi" + ai_msg.tool_calls = [] + result = updater.update_memory([msg, ai_msg]) - async def run_in_loop(): - return _run_async_update_sync(awaitable) + assert result is True + model.invoke.assert_called_once() + model.ainvoke.assert_not_called() - result = asyncio.run(run_in_loop()) + def test_no_event_loop_created_during_sync_update(self): + """Sync update must not create or destroy any event loop.""" + updater = MemoryUpdater() + valid_json = '{"user": {}, "history": {}, "newFacts": [], "factsToRemove": []}' + model = MagicMock() + response = MagicMock() + response.content = valid_json + model.invoke = MagicMock(return_value=response) - assert result is False - assert awaitable.closed is True + with ( + patch.object(updater, "_get_model", return_value=model), + 
patch("deerflow.agents.memory.updater.get_memory_config", return_value=_memory_config(enabled=True)), + patch("deerflow.agents.memory.updater.get_memory_data", return_value=_make_memory()), + patch("deerflow.agents.memory.updater.get_memory_storage", return_value=MagicMock(save=MagicMock(return_value=True))), + patch("asyncio.run", side_effect=AssertionError("asyncio.run must not be called from sync update path")), + ): + msg = MagicMock() + msg.type = "human" + msg.content = "Hello" + ai_msg = MagicMock() + ai_msg.type = "ai" + ai_msg.content = "Hi" + ai_msg.tool_calls = [] + result = updater.update_memory([msg, ai_msg]) + + assert result is True class TestFactDeduplicationCaseInsensitive: @@ -804,6 +839,7 @@ class TestReinforcementHint: response = MagicMock() response.content = f"```json\n{json_response}\n```" model.ainvoke = AsyncMock(return_value=response) + model.invoke = MagicMock(return_value=response) return model def test_reinforcement_hint_injected_when_detected(self): @@ -828,7 +864,7 @@ class TestReinforcementHint: result = updater.update_memory([msg, ai_msg], reinforcement_detected=True) assert result is True - prompt = model.ainvoke.await_args.args[0] + prompt = model.invoke.call_args.args[0] assert "Positive reinforcement signals were detected" in prompt def test_reinforcement_hint_absent_when_not_detected(self): @@ -853,7 +889,7 @@ class TestReinforcementHint: result = updater.update_memory([msg, ai_msg], reinforcement_detected=False) assert result is True - prompt = model.ainvoke.await_args.args[0] + prompt = model.invoke.call_args.args[0] assert "Positive reinforcement signals were detected" not in prompt def test_both_hints_present_when_both_detected(self): @@ -878,7 +914,7 @@ class TestReinforcementHint: result = updater.update_memory([msg, ai_msg], correction_detected=True, reinforcement_detected=True) assert result is True - prompt = model.ainvoke.await_args.args[0] + prompt = model.invoke.call_args.args[0] assert "Explicit correction signals were detected" in prompt assert "Positive reinforcement signals were detected" in prompt @@ -907,11 +943,11 @@ class TestFinalizeCacheIsolation: ) mock_response = MagicMock() mock_response.content = new_fact_json - mock_model = AsyncMock() - mock_model.ainvoke = AsyncMock(return_value=mock_response) + mock_model = MagicMock() + mock_model.invoke = MagicMock(return_value=mock_response) saved_objects: list[dict] = [] - save_mock = MagicMock(side_effect=lambda m, a=None: saved_objects.append(m) or False) # always fails + save_mock = MagicMock(side_effect=lambda m, a=None, **_: saved_objects.append(m) or False) # always fails with ( patch.object(updater, "_get_model", return_value=mock_model), @@ -928,6 +964,85 @@ class TestFinalizeCacheIsolation: ai_msg.tool_calls = [] updater.update_memory([msg, ai_msg], thread_id="t1") + # save_mock must have been exercised — otherwise the deepcopy-on-save-failure path isn't covered + save_mock.assert_called_once() + assert len(saved_objects) == 1, "save must have been called with the updated memory object" + # original_memory must not have been mutated — deepcopy isolates the mutation assert len(original_memory["facts"]) == 1, "original_memory must not be mutated by _apply_updates" assert original_memory["facts"][0]["content"] == "original" + + +class TestUserIdForwarding: + """Regression: user_id must flow through the entire sync update path. 
+ + When MemoryUpdateQueue captures context.user_id and passes it into + update_memory(..., user_id=context.user_id), the sync path must forward + it into _prepare_update_prompt → get_memory_data() and + _finalize_update → save(), so per-user memory isolation is maintained. + """ + + @staticmethod + def _make_mock_model(content): + model = MagicMock() + response = MagicMock() + response.content = content + model.invoke = MagicMock(return_value=response) + return model + + def test_sync_update_forwards_user_id_to_load_and_save(self): + """update_memory must pass user_id to get_memory_data and storage.save.""" + updater = MemoryUpdater() + valid_json = '{"user": {}, "history": {}, "newFacts": [], "factsToRemove": []}' + model = self._make_mock_model(valid_json) + mock_storage = MagicMock() + mock_storage.save = MagicMock(return_value=True) + + with ( + patch.object(updater, "_get_model", return_value=model), + patch("deerflow.agents.memory.updater.get_memory_config", return_value=_memory_config(enabled=True)), + patch("deerflow.agents.memory.updater.get_memory_data", return_value=_make_memory()) as mock_load, + patch("deerflow.agents.memory.updater.get_memory_storage", return_value=mock_storage), + ): + msg = MagicMock() + msg.type = "human" + msg.content = "Hello" + ai_msg = MagicMock() + ai_msg.type = "ai" + ai_msg.content = "Hi" + ai_msg.tool_calls = [] + result = updater.update_memory([msg, ai_msg], user_id="user-42") + + assert result is True + mock_load.assert_called_once_with(None, user_id="user-42") + mock_storage.save.assert_called_once() + save_call = mock_storage.save.call_args + assert save_call.kwargs.get("user_id") == "user-42" or (len(save_call.args) > 2 and save_call.args[2] == "user-42") + + def test_async_update_forwards_user_id_to_load_and_save(self): + """aupdate_memory must pass user_id through to the sync delegate.""" + updater = MemoryUpdater() + valid_json = '{"user": {}, "history": {}, "newFacts": [], "factsToRemove": []}' + model = self._make_mock_model(valid_json) + mock_storage = MagicMock() + mock_storage.save = MagicMock(return_value=True) + + with ( + patch.object(updater, "_get_model", return_value=model), + patch("deerflow.agents.memory.updater.get_memory_config", return_value=_memory_config(enabled=True)), + patch("deerflow.agents.memory.updater.get_memory_data", return_value=_make_memory()) as mock_load, + patch("deerflow.agents.memory.updater.get_memory_storage", return_value=mock_storage), + ): + msg = MagicMock() + msg.type = "human" + msg.content = "Hello" + ai_msg = MagicMock() + ai_msg.type = "ai" + ai_msg.content = "Hi" + ai_msg.tool_calls = [] + result = asyncio.run(updater.aupdate_memory([msg, ai_msg], user_id="user-99")) + + assert result is True + mock_load.assert_called_once_with(None, user_id="user-99") + save_call = mock_storage.save.call_args + assert save_call.kwargs.get("user_id") == "user-99" or (len(save_call.args) > 2 and save_call.args[2] == "user-99") diff --git a/backend/tests/test_memory_updater_user_isolation.py b/backend/tests/test_memory_updater_user_isolation.py new file mode 100644 index 000000000..da8a444fe --- /dev/null +++ b/backend/tests/test_memory_updater_user_isolation.py @@ -0,0 +1,30 @@ +"""Tests for user_id propagation in memory updater.""" + +from unittest.mock import MagicMock, patch + +from deerflow.agents.memory.updater import _save_memory_to_file, clear_memory_data, get_memory_data + + +def test_get_memory_data_passes_user_id(): + mock_storage = MagicMock() + mock_storage.load.return_value = {"version": "1.0"} + 
with patch("deerflow.agents.memory.updater.get_memory_storage", return_value=mock_storage): + get_memory_data(user_id="alice") + mock_storage.load.assert_called_once_with(None, user_id="alice") + + +def test_save_memory_passes_user_id(): + mock_storage = MagicMock() + mock_storage.save.return_value = True + with patch("deerflow.agents.memory.updater.get_memory_storage", return_value=mock_storage): + _save_memory_to_file({"version": "1.0"}, user_id="bob") + mock_storage.save.assert_called_once_with({"version": "1.0"}, None, user_id="bob") + + +def test_clear_memory_data_passes_user_id(): + mock_storage = MagicMock() + mock_storage.save.return_value = True + with patch("deerflow.agents.memory.updater.get_memory_storage", return_value=mock_storage): + clear_memory_data(user_id="charlie") + # Verify save was called with user_id + assert mock_storage.save.call_args.kwargs["user_id"] == "charlie" diff --git a/backend/tests/test_migration_user_isolation.py b/backend/tests/test_migration_user_isolation.py new file mode 100644 index 000000000..dbb20bdd8 --- /dev/null +++ b/backend/tests/test_migration_user_isolation.py @@ -0,0 +1,127 @@ +"""Tests for per-user data migration.""" + +import json +from pathlib import Path + +import pytest + +from deerflow.config.paths import Paths + + +@pytest.fixture +def base_dir(tmp_path: Path) -> Path: + return tmp_path + + +@pytest.fixture +def paths(base_dir: Path) -> Paths: + return Paths(base_dir) + + +class TestMigrateThreadDirs: + def test_moves_thread_to_user_dir(self, base_dir: Path, paths: Paths): + legacy = base_dir / "threads" / "t1" / "user-data" / "workspace" + legacy.mkdir(parents=True) + (legacy / "file.txt").write_text("hello") + + from scripts.migrate_user_isolation import migrate_thread_dirs + + migrate_thread_dirs(paths, thread_owner_map={"t1": "alice"}) + + expected = base_dir / "users" / "alice" / "threads" / "t1" / "user-data" / "workspace" / "file.txt" + assert expected.exists() + assert expected.read_text() == "hello" + assert not (base_dir / "threads" / "t1").exists() + + def test_unowned_thread_goes_to_default(self, base_dir: Path, paths: Paths): + legacy = base_dir / "threads" / "t2" / "user-data" / "workspace" + legacy.mkdir(parents=True) + + from scripts.migrate_user_isolation import migrate_thread_dirs + + migrate_thread_dirs(paths, thread_owner_map={}) + + expected = base_dir / "users" / "default" / "threads" / "t2" + assert expected.exists() + + def test_idempotent_skip_already_migrated(self, base_dir: Path, paths: Paths): + new_dir = base_dir / "users" / "alice" / "threads" / "t1" / "user-data" / "workspace" + new_dir.mkdir(parents=True) + + from scripts.migrate_user_isolation import migrate_thread_dirs + + migrate_thread_dirs(paths, thread_owner_map={"t1": "alice"}) + assert new_dir.exists() + + def test_conflict_preserved(self, base_dir: Path, paths: Paths): + legacy = base_dir / "threads" / "t1" / "user-data" / "workspace" + legacy.mkdir(parents=True) + (legacy / "old.txt").write_text("old") + + dest = base_dir / "users" / "alice" / "threads" / "t1" / "user-data" / "workspace" + dest.mkdir(parents=True) + (dest / "new.txt").write_text("new") + + from scripts.migrate_user_isolation import migrate_thread_dirs + + migrate_thread_dirs(paths, thread_owner_map={"t1": "alice"}) + + assert (dest / "new.txt").read_text() == "new" + conflicts = base_dir / "migration-conflicts" / "t1" + assert conflicts.exists() + + def test_cleans_up_empty_legacy_dir(self, base_dir: Path, paths: Paths): + legacy = base_dir / "threads" / "t1" / "user-data" 
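+        # Only an empty scaffold is created here; after migration the entire
+        # legacy threads/ tree should be gone, not just threads/t1.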
+ legacy.mkdir(parents=True) + + from scripts.migrate_user_isolation import migrate_thread_dirs + + migrate_thread_dirs(paths, thread_owner_map={}) + + assert not (base_dir / "threads").exists() + + def test_dry_run_does_not_move(self, base_dir: Path, paths: Paths): + legacy = base_dir / "threads" / "t1" / "user-data" + legacy.mkdir(parents=True) + + from scripts.migrate_user_isolation import migrate_thread_dirs + + report = migrate_thread_dirs(paths, thread_owner_map={"t1": "alice"}, dry_run=True) + + assert len(report) == 1 + assert (base_dir / "threads" / "t1").exists() # not moved + assert not (base_dir / "users" / "alice" / "threads" / "t1").exists() + + +class TestMigrateMemory: + def test_moves_global_memory(self, base_dir: Path, paths: Paths): + legacy_mem = base_dir / "memory.json" + legacy_mem.write_text(json.dumps({"version": "1.0", "facts": []})) + + from scripts.migrate_user_isolation import migrate_memory + + migrate_memory(paths, user_id="default") + + expected = base_dir / "users" / "default" / "memory.json" + assert expected.exists() + assert not legacy_mem.exists() + + def test_skips_if_destination_exists(self, base_dir: Path, paths: Paths): + legacy_mem = base_dir / "memory.json" + legacy_mem.write_text(json.dumps({"version": "old"})) + + dest = base_dir / "users" / "default" / "memory.json" + dest.parent.mkdir(parents=True) + dest.write_text(json.dumps({"version": "new"})) + + from scripts.migrate_user_isolation import migrate_memory + + migrate_memory(paths, user_id="default") + + assert json.loads(dest.read_text())["version"] == "new" + assert (base_dir / "memory.legacy.json").exists() + + def test_no_legacy_memory_is_noop(self, base_dir: Path, paths: Paths): + from scripts.migrate_user_isolation import migrate_memory + + migrate_memory(paths, user_id="default") # should not raise diff --git a/backend/tests/test_mindie_provider.py b/backend/tests/test_mindie_provider.py new file mode 100644 index 000000000..78bc0d972 --- /dev/null +++ b/backend/tests/test_mindie_provider.py @@ -0,0 +1,478 @@ +""" +Unit tests for MindIEChatModel adapter. +""" + +from unittest.mock import AsyncMock, patch + +import pytest +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage, ToolMessage +from langchain_core.outputs import ChatGeneration, ChatResult + +# ── Import the module under test ────────────────────────────────────────────── +from deerflow.models.mindie_provider import ( + MindIEChatModel, + _fix_messages, + _parse_xml_tool_call_to_dict, +) + +# ═════════════════════════════════════════════════════════════════════════════ +# Helpers +# ═════════════════════════════════════════════════════════════════════════════ + + +def _make_chat_result(content: str, tool_calls=None) -> ChatResult: + msg = AIMessage(content=content) + if tool_calls: + msg.tool_calls = tool_calls + gen = ChatGeneration(message=msg) + return ChatResult(generations=[gen]) + + +# ═════════════════════════════════════════════════════════════════════════════ +# 1. 
_fix_messages
+# ═════════════════════════════════════════════════════════════════════════════
+
+
+class TestFixMessages:
+    # ── list content → str ────────────────────────────────────────────────────
+
+    def test_list_content_extracted_to_str(self):
+        msg = HumanMessage(
+            content=[
+                {"type": "text", "text": "Hello"},
+                {"type": "text", "text": " world"},
+            ]
+        )
+        result = _fix_messages([msg])
+        assert result[0].content == "Hello world"
+
+    def test_list_content_ignores_non_text_blocks(self):
+        msg = HumanMessage(
+            content=[
+                {"type": "image_url", "image_url": "http://x.com/img.png"},
+                {"type": "text", "text": "caption"},
+            ]
+        )
+        result = _fix_messages([msg])
+        assert result[0].content == "caption"
+
+    def test_empty_list_content_becomes_space(self):
+        msg = HumanMessage(content=[])
+        result = _fix_messages([msg])
+        assert result[0].content == " "
+
+    # ── plain str content ─────────────────────────────────────────────────────
+
+    def test_plain_string_content_preserved(self):
+        msg = HumanMessage(content="hi there")
+        result = _fix_messages([msg])
+        assert result[0].content == "hi there"
+
+    def test_empty_string_content_becomes_space(self):
+        msg = HumanMessage(content="")
+        result = _fix_messages([msg])
+        assert result[0].content == " "
+
+    # ── AIMessage with tool_calls → XML ───────────────────────────────────────
+
+    def test_ai_message_with_tool_calls_serialised_to_xml(self):
+        msg = AIMessage(
+            content="Sure",
+            tool_calls=[
+                {
+                    "name": "get_weather",
+                    "args": {"city": "London"},
+                    "id": "call_abc",
+                }
+            ],
+        )
+        result = _fix_messages([msg])
+        out = result[0]
+        assert isinstance(out, AIMessage)
+        assert '<tool_call name="get_weather">' in out.content
+        assert "</tool_call>" in out.content
+        assert "London" in out.content
+        assert not getattr(out, "tool_calls", [])
+
+    def test_ai_message_text_preserved_before_xml(self):
+        msg = AIMessage(
+            content="Here you go",
+            tool_calls=[{"name": "search", "args": {"q": "pytest"}, "id": "x"}],
+        )
+        result = _fix_messages([msg])
+        assert result[0].content.startswith("Here you go")
+
+    def test_ai_message_multiple_tool_calls(self):
+        msg = AIMessage(
+            content="",
+            tool_calls=[
+                {"name": "tool_a", "args": {"x": 1}, "id": "id1"},
+                {"name": "tool_b", "args": {"y": 2}, "id": "id2"},
+            ],
+        )
+        result = _fix_messages([msg])
+        content = result[0].content
+        assert content.count("</tool_call>") == 2
+        assert '<tool_call name="tool_a">' in content
+        assert '<tool_call name="tool_b">' in content
+
+    def test_ai_message_tool_args_are_xml_escaped(self):
+        msg = AIMessage(
+            content="",
+            tool_calls=[
+                {
+                    "name": "fn<&>",
+                    "args": {"k<&>": "v<&>"},
+                    "id": "id1",
+                }
+            ],
+        )
+        result = _fix_messages([msg])
+        content = result[0].content
+        assert '<tool_call name="fn&lt;&amp;&gt;">' in content
+        assert "v&lt;&amp;&gt;" in content
+
+    # ── ToolMessage → HumanMessage ─────────────────────────────────────────────
+
+    def test_tool_message_becomes_human_message(self):
+        msg = ToolMessage(content="42 degrees", tool_call_id="call_abc")
+        result = _fix_messages([msg])
+        out = result[0]
+        assert isinstance(out, HumanMessage)
+        assert '<tool_result id="call_abc">' in out.content
+        assert "42 degrees" in out.content
+
+    def test_tool_message_with_list_content(self):
+        msg = ToolMessage(
+            content=[{"type": "text", "text": "result"}],
+            tool_call_id="call_xyz",
+        )
+        result = _fix_messages([msg])
+        assert isinstance(result[0], HumanMessage)
+        assert "result" in result[0].content
+
+    # ── Mixed message list ─────────────────────────────────────────────────────
+
+    def test_mixed_message_types_ordering_preserved(self):
+        msgs = [
+            HumanMessage(content="q"),
+            AIMessage(content="a"),
+            ToolMessage(content="tool out", tool_call_id="c1"),
+            HumanMessage(content="follow up"),
+        ]
+        result = _fix_messages(msgs)
+        assert len(result) == 4
+        assert isinstance(result[2], HumanMessage)
+        assert result[3].content == "follow up"
+
+    # ── SystemMessage pass-through ─────────────────────────────────────────────
+
+    def test_system_message_passed_through_unchanged(self):
+        msg = SystemMessage(content="You are helpful.")
+        result = _fix_messages([msg])
+        assert result[0].content == "You are helpful."
+
+
+# ═════════════════════════════════════════════════════════════════════════════
+# 2. _parse_xml_tool_call_to_dict
+# ═════════════════════════════════════════════════════════════════════════════
+
+
+class TestParseXmlToolCalls:
+    def test_no_tool_call_returns_original(self):
+        content = "Just a normal reply."
+        clean, calls = _parse_xml_tool_call_to_dict(content)
+        assert clean == content
+        assert calls == []
+
+    def test_single_tool_call_parsed(self):
+        content = '<tool_call name="search"> <param name="query">pytest</param> </tool_call>'
+        clean, calls = _parse_xml_tool_call_to_dict(content)
+        assert clean == ""
+        assert len(calls) == 1
+        assert calls[0]["name"] == "search"
+        assert calls[0]["args"]["query"] == "pytest"
+        assert calls[0]["id"].startswith("call_")
+
+    def test_multiple_tool_calls_parsed(self):
+        content = '<tool_call name="a"><param name="x">1</param></tool_call><tool_call name="b"><param name="y">2</param></tool_call>'
+        _, calls = _parse_xml_tool_call_to_dict(content)
+        assert len(calls) == 2
+        assert calls[0]["name"] == "a"
+        assert calls[1]["name"] == "b"
+
+    def test_nested_tool_call_blocks_do_not_break_parsing(self):
+        content = '<tool_call name="outer"><param name="q">1</param><tool_call name="inner"><param name="x">2</param></tool_call></tool_call>'
+        clean, calls = _parse_xml_tool_call_to_dict(content)
+        assert clean == ""
+        assert len(calls) == 1
+        assert calls[0]["name"] == "outer"
+        assert calls[0]["args"] == {"q": 1}
+        assert "x" not in calls[0]["args"]
+
+    def test_text_before_tool_call_preserved(self):
+        content = 'Here is the answer.\n<tool_call name="fn"><param name="k">v</param></tool_call>'
+        clean, calls = _parse_xml_tool_call_to_dict(content)
+        assert clean == "Here is the answer."
+        assert len(calls) == 1
+
+    def test_integer_param_deserialised(self):
+        content = '<tool_call name="calc"><param name="n">42</param></tool_call>'
+        _, calls = _parse_xml_tool_call_to_dict(content)
+        assert calls[0]["args"]["n"] == 42
+
+    def test_list_param_deserialised(self):
+        content = '<tool_call name="fn"><param name="lst">["a","b"]</param></tool_call>'
+        _, calls = _parse_xml_tool_call_to_dict(content)
+        assert calls[0]["args"]["lst"] == ["a", "b"]
+
+    def test_dict_param_deserialised(self):
+        content = '<tool_call name="fn"><param name="d">{"k": 1}</param></tool_call>'
+        _, calls = _parse_xml_tool_call_to_dict(content)
+        assert calls[0]["args"]["d"] == {"k": 1}
+
+    def test_bool_param_deserialised(self):
+        content = '<tool_call name="fn"><param name="flag">true</param></tool_call>'
+        _, calls = _parse_xml_tool_call_to_dict(content)
+        assert calls[0]["args"]["flag"] is True
+
+    def test_malformed_param_stays_string(self):
+        content = '<tool_call name="fn"><param name="bad">{broken json</param></tool_call>'
+        _, calls = _parse_xml_tool_call_to_dict(content)
+        assert calls[0]["args"]["bad"] == "{broken json"
+
+    def test_non_string_input_returned_as_is(self):
+        result = _parse_xml_tool_call_to_dict(None)
+        assert result == (None, [])
+
+    def test_unique_ids_generated(self):
+        block = '<tool_call name="fn"><param name="k">v</param></tool_call>'
+        _, c1 = _parse_xml_tool_call_to_dict(block)
+        _, c2 = _parse_xml_tool_call_to_dict(block)
+        assert c1[0]["id"] != c2[0]["id"]
+
+    def test_escaped_entities_are_unescaped(self):
+        content = '<tool_call name="fn&lt;&amp;&gt;"><param name="k&lt;&amp;&gt;">v&lt;&amp;&gt;</param></tool_call>'
+        _, calls = _parse_xml_tool_call_to_dict(content)
+        assert calls[0]["name"] == "fn<&>"
+        assert calls[0]["args"]["k<&>"] == "v<&>"
+
+
+# ═════════════════════════════════════════════════════════════════════════════
+# 3. MindIEChatModel._patch_result_with_tools
+# ═════════════════════════════════════════════════════════════════════════════
+
+
+class TestPatchResult:
+    def _model(self):
+        with patch.object(MindIEChatModel, "__init__", return_value=None):
+            m = MindIEChatModel.__new__(MindIEChatModel)
+            return m
+
+    def test_escaped_newlines_fixed(self):
+        model = self._model()
+        result = _make_chat_result("line1\\nline2")
+        patched = model._patch_result_with_tools(result)
+        assert patched.generations[0].message.content == "line1\nline2"
+
+    def test_escaped_newlines_inside_code_fence_preserved(self):
+        model = self._model()
+        result = _make_chat_result('text\\n```json\n{"k":"a\\\\nb"}\n```\\nend')
+        patched = model._patch_result_with_tools(result)
+        assert patched.generations[0].message.content == 'text\n```json\n{"k":"a\\\\nb"}\n```\nend'
+
+    def test_xml_tool_calls_extracted(self):
+        model = self._model()
+        content = '<tool_call name="calc"><param name="expr">1+1</param></tool_call>'
+        result = _make_chat_result(content)
+        patched = model._patch_result_with_tools(result)
+        msg = patched.generations[0].message
+        assert msg.content == ""
+        assert len(msg.tool_calls) == 1
+        assert msg.tool_calls[0]["name"] == "calc"
+
+    def test_patch_result_appends_to_existing_tool_calls(self):
+        model = self._model()
+        existing = [{"name": "existing", "args": {}, "id": "e1"}]
+        content = '<tool_call name="new_tool"><param name="k">v</param></tool_call>'
+        result = _make_chat_result(content, tool_calls=existing)
+        patched = model._patch_result_with_tools(result)
+        msg = patched.generations[0].message
+        assert len(msg.tool_calls) == 2
+        names = [tc["name"] for tc in msg.tool_calls]
+        assert "existing" in names
+        assert "new_tool" in names
+
+    def test_no_tool_call_content_unchanged(self):
+        model = self._model()
+        result = _make_chat_result("plain reply")
+        patched = model._patch_result_with_tools(result)
+        assert patched.generations[0].message.content == "plain reply"
+
+    def test_non_string_content_skipped(self):
+        model = self._model()
+        msg = AIMessage(content=[{"type": "text", "text": "hi"}])
+        gen = ChatGeneration(message=msg)
+        result = ChatResult(generations=[gen])
+        patched = model._patch_result_with_tools(result)
+        assert patched is not None
+
+
+class TestMindIEInit:
+    def test_timeout_kwargs_are_normalized(self):
+        captured = {}
+
+        def fake_init(self, **kwargs):
+            captured.update(kwargs)
+
+        with patch("deerflow.models.mindie_provider.ChatOpenAI.__init__", new=fake_init):
+            MindIEChatModel(
+                model="mindie-test",
+                api_key="test-key",
+                connect_timeout=1.0,
+                read_timeout=2.0,
+                write_timeout=3.0,
+                pool_timeout=4.0,
+            )
+
+        timeout = captured.get("timeout")
+        assert timeout is not None
+        assert timeout.connect == 1.0
+        assert timeout.read == 2.0
+        assert timeout.write == 3.0
+        assert timeout.pool == 4.0
+
+    def test_explicit_timeout_takes_precedence(self):
+        captured = {}
+
+        def fake_init(self, **kwargs):
+            captured.update(kwargs)
+
+        with patch("deerflow.models.mindie_provider.ChatOpenAI.__init__", new=fake_init):
+            MindIEChatModel(
+                model="mindie-test",
+                api_key="test-key",
+                timeout=9.0,
+                connect_timeout=1.0,
+                read_timeout=2.0,
+                write_timeout=3.0,
+                pool_timeout=4.0,
+            )
+
+        assert captured.get("timeout") == 9.0
+
+
+# ═════════════════════════════════════════════════════════════════════════════
+# 4. 
MindIEChatModel._generate (sync) +# ═════════════════════════════════════════════════════════════════════════════ + + +class TestGenerate: + def test_generate_calls_fix_messages_and_patch(self): + with patch("deerflow.models.mindie_provider.ChatOpenAI._generate") as mock_super_gen, patch.object(MindIEChatModel, "__init__", return_value=None): + mock_super_gen.return_value = _make_chat_result("hello") + model = MindIEChatModel.__new__(MindIEChatModel) + + msgs = [HumanMessage(content="ping")] + result = model._generate(msgs) + + assert mock_super_gen.called + called_msgs = mock_super_gen.call_args[0][0] + assert all(isinstance(m.content, str) for m in called_msgs) + assert result.generations[0].message.content == "hello" + + +# ═════════════════════════════════════════════════════════════════════════════ +# 5. MindIEChatModel._agenerate (async) +# ═════════════════════════════════════════════════════════════════════════════ + + +class TestAGenerate: + @pytest.mark.asyncio + async def test_agenerate_patches_result(self): + with patch("deerflow.models.mindie_provider.ChatOpenAI._agenerate", new_callable=AsyncMock) as mock_ag, patch.object(MindIEChatModel, "__init__", return_value=None): + mock_ag.return_value = _make_chat_result("world\\nfoo") + model = MindIEChatModel.__new__(MindIEChatModel) + + result = await model._agenerate([HumanMessage(content="hi")]) + assert result.generations[0].message.content == "world\nfoo" + + +# ═════════════════════════════════════════════════════════════════════════════ +# 6. MindIEChatModel._astream (async generator) +# ═════════════════════════════════════════════════════════════════════════════ + + +class TestAStream: + async def _collect(self, gen): + chunks = [] + async for chunk in gen: + chunks.append(chunk) + return chunks + + @pytest.mark.asyncio + async def test_no_tools_uses_real_stream(self): + from langchain_core.messages import AIMessageChunk + from langchain_core.outputs import ChatGenerationChunk + + async def fake_stream(*args, **kwargs): + for char in ["hel", "lo"]: + yield ChatGenerationChunk(message=AIMessageChunk(content=char)) + + with patch("deerflow.models.mindie_provider.ChatOpenAI._astream", side_effect=fake_stream), patch.object(MindIEChatModel, "__init__", return_value=None): + model = MindIEChatModel.__new__(MindIEChatModel) + chunks = await self._collect(model._astream([HumanMessage(content="hi")])) + + assert "".join(c.message.content for c in chunks) == "hello" + + @pytest.mark.asyncio + async def test_no_tools_fixes_escaped_newlines_in_stream(self): + from langchain_core.messages import AIMessageChunk + from langchain_core.outputs import ChatGenerationChunk + + async def fake_stream(*args, **kwargs): + yield ChatGenerationChunk(message=AIMessageChunk(content="a\\nb")) + + with patch("deerflow.models.mindie_provider.ChatOpenAI._astream", side_effect=fake_stream), patch.object(MindIEChatModel, "__init__", return_value=None): + model = MindIEChatModel.__new__(MindIEChatModel) + chunks = await self._collect(model._astream([HumanMessage(content="x")])) + + assert chunks[0].message.content == "a\nb" + + @pytest.mark.asyncio + async def test_with_tools_fake_streams_text_in_chunks(self): + with patch.object(MindIEChatModel, "_agenerate", new_callable=AsyncMock) as mock_ag, patch.object(MindIEChatModel, "__init__", return_value=None): + long_text = "A" * 50 + mock_ag.return_value = _make_chat_result(long_text) + model = MindIEChatModel.__new__(MindIEChatModel) + + chunks = await self._collect(model._astream([HumanMessage(content="q")], 
tools=[{"type": "function", "function": {"name": "dummy"}}])) + + full = "".join(c.message.content for c in chunks) + assert full == long_text + assert len(chunks) > 1 + + @pytest.mark.asyncio + async def test_with_tools_emits_tool_call_chunk(self): + + tool_calls = [{"name": "fn", "args": {}, "id": "c1"}] + with patch.object(MindIEChatModel, "_agenerate", new_callable=AsyncMock) as mock_ag, patch.object(MindIEChatModel, "__init__", return_value=None): + mock_ag.return_value = _make_chat_result("ok", tool_calls=tool_calls) + model = MindIEChatModel.__new__(MindIEChatModel) + + chunks = await self._collect(model._astream([HumanMessage(content="q")], tools=[{"type": "function", "function": {"name": "fn"}}])) + + tool_chunks = [c for c in chunks if getattr(c.message, "tool_calls", [])] + assert tool_chunks, "No chunk carried tool_calls" + assert tool_chunks[-1].message.tool_calls[0]["name"] == "fn" + + @pytest.mark.asyncio + async def test_with_tools_empty_text_still_emits_tool_chunk(self): + tool_calls = [{"name": "x", "args": {}, "id": "c2"}] + with patch.object(MindIEChatModel, "_agenerate", new_callable=AsyncMock) as mock_ag, patch.object(MindIEChatModel, "__init__", return_value=None): + mock_ag.return_value = _make_chat_result("", tool_calls=tool_calls) + model = MindIEChatModel.__new__(MindIEChatModel) + + chunks = await self._collect(model._astream([HumanMessage(content="q")], tools=[{"type": "function", "function": {"name": "x"}}])) + + assert any(getattr(c.message, "tool_calls", []) for c in chunks) diff --git a/backend/tests/test_model_factory.py b/backend/tests/test_model_factory.py index b7badb991..c8dbe0791 100644 --- a/backend/tests/test_model_factory.py +++ b/backend/tests/test_model_factory.py @@ -886,6 +886,84 @@ def test_thinking_disabled_vllm_enable_thinking_format(monkeypatch): assert captured.get("reasoning_effort") is None +# --------------------------------------------------------------------------- +# stream_usage injection +# --------------------------------------------------------------------------- + + +class _FakeWithStreamUsage(FakeChatModel): + """Fake model that declares stream_usage in model_fields (like BaseChatOpenAI).""" + + stream_usage: bool | None = None + + +def test_stream_usage_injected_for_openai_compatible_model(monkeypatch): + """Factory should set stream_usage=True for models with stream_usage field.""" + cfg = _make_app_config([_make_model("deepseek", use="langchain_deepseek:ChatDeepSeek")]) + _patch_factory(monkeypatch, cfg, model_class=_FakeWithStreamUsage) + + captured: dict = {} + + class CapturingModel(_FakeWithStreamUsage): + def __init__(self, **kwargs): + captured.update(kwargs) + BaseChatModel.__init__(self, **kwargs) + + monkeypatch.setattr(factory_module, "resolve_class", lambda path, base: CapturingModel) + + factory_module.create_chat_model(name="deepseek") + + assert captured.get("stream_usage") is True + + +def test_stream_usage_not_injected_for_non_openai_model(monkeypatch): + """Factory should NOT inject stream_usage for models without the field.""" + cfg = _make_app_config([_make_model("claude", use="langchain_anthropic:ChatAnthropic")]) + _patch_factory(monkeypatch, cfg) + + captured: dict = {} + + class CapturingModel(FakeChatModel): + def __init__(self, **kwargs): + captured.update(kwargs) + BaseChatModel.__init__(self, **kwargs) + + monkeypatch.setattr(factory_module, "resolve_class", lambda path, base: CapturingModel) + + factory_module.create_chat_model(name="claude") + + assert "stream_usage" not in captured + + +def 
test_stream_usage_not_overridden_when_explicitly_set_in_config(monkeypatch):
+    """If the config explicitly sets stream_usage=False, the factory should respect it."""
+    cfg = _make_app_config([_make_model("deepseek", use="langchain_deepseek:ChatDeepSeek")])
+    _patch_factory(monkeypatch, cfg, model_class=_FakeWithStreamUsage)
+
+    captured: dict = {}
+
+    class CapturingModel(_FakeWithStreamUsage):
+        def __init__(self, **kwargs):
+            captured.update(kwargs)
+            BaseChatModel.__init__(self, **kwargs)
+
+    monkeypatch.setattr(factory_module, "resolve_class", lambda path, base: CapturingModel)
+
+    # Simulate config with stream_usage explicitly set by patching get_model_config
+    original_get_model_config = cfg.get_model_config
+
+    def patched_get_model_config(name):
+        mc = original_get_model_config(name)
+        mc.stream_usage = False  # type: ignore[attr-defined]
+        return mc
+
+    monkeypatch.setattr(cfg, "get_model_config", patched_get_model_config)
+
+    factory_module.create_chat_model(name="deepseek")
+
+    assert captured.get("stream_usage") is False
+
+
 def test_openai_responses_api_settings_are_passed_to_chatopenai(monkeypatch):
     model = ModelConfig(
         name="gpt-5-responses",
diff --git a/backend/tests/test_owner_isolation.py b/backend/tests/test_owner_isolation.py
new file mode 100644
index 000000000..33d21f3e3
--- /dev/null
+++ b/backend/tests/test_owner_isolation.py
@@ -0,0 +1,465 @@
+"""Cross-user isolation tests — non-negotiable safety gate.
+
+Mirrors TC-API-17..20 from backend/docs/AUTH_TEST_PLAN.md. A failure
+here means users can see each other's data; the PR must not merge.
+
+Architecture note
+-----------------
+These tests bypass the HTTP layer and exercise the storage-layer
+owner filter directly by switching the ``user_context`` contextvar
+between two users. The safety property under test is:
+
+    After a repository write with user_id=A, a subsequent read with
+    user_id=B must not return the row, and vice versa.
+
+The HTTP layer is covered by test_auth_middleware.py, which proves
+that a request cookie reaches the ``set_current_user`` call. Together
+the two suites prove the full chain:
+
+    cookie → middleware → contextvar → repository → isolation
+
+Every test in this file opts out of the autouse contextvar fixture
+(``@pytest.mark.no_auto_user``) so it can set the contextvar to the
+specific users it cares about.
+"""
+
+from __future__ import annotations
+
+from types import SimpleNamespace
+
+import pytest
+
+from deerflow.runtime.user_context import (
+    reset_current_user,
+    set_current_user,
+)
+
+USER_A = SimpleNamespace(id="user-a", email="a@test.local")
+USER_B = SimpleNamespace(id="user-b", email="b@test.local")
+
+
+async def _make_engines(tmp_path):
+    """Initialize the shared engine against a per-test SQLite DB.
+
+    Returns a cleanup coroutine the caller should await at the end.
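+
+    Every test below follows the same shape; a sketch of the shared pattern
+    (only calls these tests themselves use, with ``SomeRepository`` standing
+    in for the repository under test)::
+
+        cleanup = await _make_engines(tmp_path)
+        try:
+            repo = SomeRepository(get_session_factory())
+            with _as_user(USER_A):
+                await repo.create(...)
+        finally:
+            await cleanup()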
+ """ + from deerflow.persistence.engine import close_engine, init_engine + + url = f"sqlite+aiosqlite:///{tmp_path / 'isolation.db'}" + await init_engine("sqlite", url=url, sqlite_dir=str(tmp_path)) + return close_engine + + +def _as_user(user): + """Context manager-like helper that set/reset the contextvar.""" + + class _Ctx: + def __enter__(self): + self._token = set_current_user(user) + return user + + def __exit__(self, *exc): + reset_current_user(self._token) + + return _Ctx() + + +# ── TC-API-17 — threads_meta isolation ──────────────────────────────────── + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_thread_meta_cross_user_isolation(tmp_path): + from deerflow.persistence.engine import get_session_factory + from deerflow.persistence.thread_meta import ThreadMetaRepository + + cleanup = await _make_engines(tmp_path) + try: + repo = ThreadMetaRepository(get_session_factory()) + + # User A creates a thread. + with _as_user(USER_A): + await repo.create("t-alpha", display_name="A's private thread") + + # User B creates a thread. + with _as_user(USER_B): + await repo.create("t-beta", display_name="B's private thread") + + # User A must see only A's thread. + with _as_user(USER_A): + a_view = await repo.get("t-alpha") + assert a_view is not None + assert a_view["display_name"] == "A's private thread" + + # CRITICAL: User A must NOT see B's thread. + leaked = await repo.get("t-beta") + assert leaked is None, f"User A leaked User B's thread: {leaked}" + + # Search should only return A's threads. + results = await repo.search() + assert [r["thread_id"] for r in results] == ["t-alpha"] + + # User B must see only B's thread. + with _as_user(USER_B): + b_view = await repo.get("t-beta") + assert b_view is not None + assert b_view["display_name"] == "B's private thread" + + leaked = await repo.get("t-alpha") + assert leaked is None, f"User B leaked User A's thread: {leaked}" + + results = await repo.search() + assert [r["thread_id"] for r in results] == ["t-beta"] + finally: + await cleanup() + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_thread_meta_cross_user_mutation_denied(tmp_path): + """User B cannot update or delete a thread owned by User A.""" + from deerflow.persistence.engine import get_session_factory + from deerflow.persistence.thread_meta import ThreadMetaRepository + + cleanup = await _make_engines(tmp_path) + try: + repo = ThreadMetaRepository(get_session_factory()) + + with _as_user(USER_A): + await repo.create("t-alpha", display_name="original") + + # User B tries to rename A's thread — must be a no-op. + with _as_user(USER_B): + await repo.update_display_name("t-alpha", "hacked") + + # Verify the row is unchanged from A's perspective. + with _as_user(USER_A): + row = await repo.get("t-alpha") + assert row is not None + assert row["display_name"] == "original" + + # User B tries to delete A's thread — must be a no-op. + with _as_user(USER_B): + await repo.delete("t-alpha") + + # A's thread still exists. 
+ with _as_user(USER_A): + row = await repo.get("t-alpha") + assert row is not None + finally: + await cleanup() + + +# ── TC-API-18 — runs isolation ──────────────────────────────────────────── + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_runs_cross_user_isolation(tmp_path): + from deerflow.persistence.engine import get_session_factory + from deerflow.persistence.run import RunRepository + + cleanup = await _make_engines(tmp_path) + try: + repo = RunRepository(get_session_factory()) + + with _as_user(USER_A): + await repo.put("run-a1", thread_id="t-alpha") + await repo.put("run-a2", thread_id="t-alpha") + + with _as_user(USER_B): + await repo.put("run-b1", thread_id="t-beta") + + # User A must see only A's runs. + with _as_user(USER_A): + r = await repo.get("run-a1") + assert r is not None + assert r["run_id"] == "run-a1" + + leaked = await repo.get("run-b1") + assert leaked is None, "User A leaked User B's run" + + a_runs = await repo.list_by_thread("t-alpha") + assert {r["run_id"] for r in a_runs} == {"run-a1", "run-a2"} + + # Listing B's thread from A's perspective: empty + empty = await repo.list_by_thread("t-beta") + assert empty == [] + + # User B must see only B's runs. + with _as_user(USER_B): + leaked = await repo.get("run-a1") + assert leaked is None, "User B leaked User A's run" + + b_runs = await repo.list_by_thread("t-beta") + assert [r["run_id"] for r in b_runs] == ["run-b1"] + finally: + await cleanup() + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_runs_cross_user_delete_denied(tmp_path): + from deerflow.persistence.engine import get_session_factory + from deerflow.persistence.run import RunRepository + + cleanup = await _make_engines(tmp_path) + try: + repo = RunRepository(get_session_factory()) + + with _as_user(USER_A): + await repo.put("run-a1", thread_id="t-alpha") + + # User B tries to delete A's run — no-op. + with _as_user(USER_B): + await repo.delete("run-a1") + + # A's run still exists. + with _as_user(USER_A): + row = await repo.get("run-a1") + assert row is not None + finally: + await cleanup() + + +# ── TC-API-19 — run_events isolation (CRITICAL: content leak) ───────────── + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_run_events_cross_user_isolation(tmp_path): + """run_events holds raw conversation content — most sensitive leak vector.""" + from deerflow.persistence.engine import get_session_factory + from deerflow.runtime.events.store.db import DbRunEventStore + + cleanup = await _make_engines(tmp_path) + try: + store = DbRunEventStore(get_session_factory()) + + with _as_user(USER_A): + await store.put( + thread_id="t-alpha", + run_id="run-a1", + event_type="human_message", + category="message", + content="User A private question", + ) + await store.put( + thread_id="t-alpha", + run_id="run-a1", + event_type="ai_message", + category="message", + content="User A private answer", + ) + + with _as_user(USER_B): + await store.put( + thread_id="t-beta", + run_id="run-b1", + event_type="human_message", + category="message", + content="User B private question", + ) + + # User A must see only A's events — CRITICAL. + with _as_user(USER_A): + msgs = await store.list_messages("t-alpha") + contents = [m["content"] for m in msgs] + assert "User A private question" in contents + assert "User A private answer" in contents + # CRITICAL: User B's content must not appear. + assert "User B private question" not in contents + + # Attempt to read B's thread by guessing thread_id. 
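+        # Denial must look like "no data", not an error: an exception here
+        # would leak that t-beta exists.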
+ leaked = await store.list_messages("t-beta") + assert leaked == [], f"User A leaked User B's messages: {leaked}" + + leaked_events = await store.list_events("t-beta", "run-b1") + assert leaked_events == [], "User A leaked User B's events" + + # count_messages must also be zero for B's thread from A's view. + count = await store.count_messages("t-beta") + assert count == 0 + + # User B must see only B's events. + with _as_user(USER_B): + msgs = await store.list_messages("t-beta") + contents = [m["content"] for m in msgs] + assert "User B private question" in contents + assert "User A private question" not in contents + assert "User A private answer" not in contents + + count = await store.count_messages("t-alpha") + assert count == 0 + finally: + await cleanup() + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_run_events_cross_user_delete_denied(tmp_path): + """User B cannot delete User A's event stream.""" + from deerflow.persistence.engine import get_session_factory + from deerflow.runtime.events.store.db import DbRunEventStore + + cleanup = await _make_engines(tmp_path) + try: + store = DbRunEventStore(get_session_factory()) + + with _as_user(USER_A): + await store.put( + thread_id="t-alpha", + run_id="run-a1", + event_type="human_message", + category="message", + content="hello", + ) + + # User B tries to wipe A's thread events. + with _as_user(USER_B): + removed = await store.delete_by_thread("t-alpha") + assert removed == 0, f"User B deleted {removed} of User A's events" + + # A's events still exist. + with _as_user(USER_A): + count = await store.count_messages("t-alpha") + assert count == 1 + finally: + await cleanup() + + +# ── TC-API-20 — feedback isolation ──────────────────────────────────────── + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_feedback_cross_user_isolation(tmp_path): + from deerflow.persistence.engine import get_session_factory + from deerflow.persistence.feedback import FeedbackRepository + + cleanup = await _make_engines(tmp_path) + try: + repo = FeedbackRepository(get_session_factory()) + + # User A submits positive feedback. + with _as_user(USER_A): + a_feedback = await repo.create( + run_id="run-a1", + thread_id="t-alpha", + rating=1, + comment="A liked this", + ) + + # User B submits negative feedback. + with _as_user(USER_B): + b_feedback = await repo.create( + run_id="run-b1", + thread_id="t-beta", + rating=-1, + comment="B disliked this", + ) + + # User A must see only A's feedback. + with _as_user(USER_A): + retrieved = await repo.get(a_feedback["feedback_id"]) + assert retrieved is not None + assert retrieved["comment"] == "A liked this" + + # CRITICAL: cannot read B's feedback by id. + leaked = await repo.get(b_feedback["feedback_id"]) + assert leaked is None, "User A leaked User B's feedback" + + # list_by_run for B's run must be empty. + empty = await repo.list_by_run("t-beta", "run-b1") + assert empty == [] + + # User B must see only B's feedback. 
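+        # (even a known or guessed feedback_id from A must read back as None for B)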
+ with _as_user(USER_B): + leaked = await repo.get(a_feedback["feedback_id"]) + assert leaked is None, "User B leaked User A's feedback" + + b_list = await repo.list_by_run("t-beta", "run-b1") + assert len(b_list) == 1 + assert b_list[0]["comment"] == "B disliked this" + finally: + await cleanup() + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_feedback_cross_user_delete_denied(tmp_path): + from deerflow.persistence.engine import get_session_factory + from deerflow.persistence.feedback import FeedbackRepository + + cleanup = await _make_engines(tmp_path) + try: + repo = FeedbackRepository(get_session_factory()) + + with _as_user(USER_A): + fb = await repo.create(run_id="run-a1", thread_id="t-alpha", rating=1) + + # User B tries to delete A's feedback — must return False (no-op). + with _as_user(USER_B): + deleted = await repo.delete(fb["feedback_id"]) + assert deleted is False, "User B deleted User A's feedback" + + # A's feedback still retrievable. + with _as_user(USER_A): + row = await repo.get(fb["feedback_id"]) + assert row is not None + finally: + await cleanup() + + +# ── Regression: AUTO sentinel without contextvar must raise ─────────────── + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_repository_without_context_raises(tmp_path): + """Defense-in-depth: calling repo methods without a user context errors.""" + from deerflow.persistence.engine import get_session_factory + from deerflow.persistence.thread_meta import ThreadMetaRepository + + cleanup = await _make_engines(tmp_path) + try: + repo = ThreadMetaRepository(get_session_factory()) + # Contextvar is explicitly unset under @pytest.mark.no_auto_user. + with pytest.raises(RuntimeError, match="no user context is set"): + await repo.get("anything") + finally: + await cleanup() + + +# ── Escape hatch: explicit user_id=None bypasses filter (for migration) ── + + +@pytest.mark.anyio +@pytest.mark.no_auto_user +async def test_explicit_none_bypasses_filter(tmp_path): + """Migration scripts pass user_id=None to see all rows regardless of owner.""" + from deerflow.persistence.engine import get_session_factory + from deerflow.persistence.thread_meta import ThreadMetaRepository + + cleanup = await _make_engines(tmp_path) + try: + repo = ThreadMetaRepository(get_session_factory()) + + # Seed data as two different users. + with _as_user(USER_A): + await repo.create("t-alpha") + with _as_user(USER_B): + await repo.create("t-beta") + + # Migration-style read: no contextvar, explicit None bypass. + all_rows = await repo.search(user_id=None) + thread_ids = {r["thread_id"] for r in all_rows} + assert thread_ids == {"t-alpha", "t-beta"} + + # Explicit get with None does not apply the filter either. 
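+        # (user_id=None is the only bypass; calling with no user context at all
+        # raises instead, per test_repository_without_context_raises above)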
+ row_a = await repo.get("t-alpha", user_id=None) + assert row_a is not None + row_b = await repo.get("t-beta", user_id=None) + assert row_b is not None + finally: + await cleanup() diff --git a/backend/tests/test_paths_user_isolation.py b/backend/tests/test_paths_user_isolation.py new file mode 100644 index 000000000..8f312dcff --- /dev/null +++ b/backend/tests/test_paths_user_isolation.py @@ -0,0 +1,169 @@ +"""Tests for user-scoped path resolution in Paths.""" + +from pathlib import Path + +import pytest + +from deerflow.config.paths import Paths + + +@pytest.fixture +def paths(tmp_path: Path) -> Paths: + return Paths(tmp_path) + + +class TestValidateUserId: + def test_valid_user_id(self, paths: Paths): + d = paths.user_dir("u-abc-123") + assert d == paths.base_dir / "users" / "u-abc-123" + + def test_rejects_path_traversal(self, paths: Paths): + with pytest.raises(ValueError, match="Invalid user_id"): + paths.user_dir("../escape") + + def test_rejects_slash(self, paths: Paths): + with pytest.raises(ValueError, match="Invalid user_id"): + paths.user_dir("foo/bar") + + def test_rejects_empty(self, paths: Paths): + with pytest.raises(ValueError, match="Invalid user_id"): + paths.user_dir("") + + +class TestUserDir: + def test_user_dir(self, paths: Paths): + assert paths.user_dir("alice") == paths.base_dir / "users" / "alice" + + +class TestUserMemoryFile: + def test_user_memory_file(self, paths: Paths): + assert paths.user_memory_file("bob") == paths.base_dir / "users" / "bob" / "memory.json" + + +class TestUserAgentMemoryFile: + def test_user_agent_memory_file(self, paths: Paths): + expected = paths.base_dir / "users" / "bob" / "agents" / "myagent" / "memory.json" + assert paths.user_agent_memory_file("bob", "myagent") == expected + + def test_user_agent_memory_file_lowercases_name(self, paths: Paths): + expected = paths.base_dir / "users" / "bob" / "agents" / "myagent" / "memory.json" + assert paths.user_agent_memory_file("bob", "MyAgent") == expected + + +class TestUserThreadDir: + def test_user_thread_dir(self, paths: Paths): + expected = paths.base_dir / "users" / "u1" / "threads" / "t1" + assert paths.thread_dir("t1", user_id="u1") == expected + + def test_thread_dir_no_user_id_falls_back_to_legacy(self, paths: Paths): + expected = paths.base_dir / "threads" / "t1" + assert paths.thread_dir("t1") == expected + + +class TestUserSandboxDirs: + def test_sandbox_work_dir(self, paths: Paths): + expected = paths.base_dir / "users" / "u1" / "threads" / "t1" / "user-data" / "workspace" + assert paths.sandbox_work_dir("t1", user_id="u1") == expected + + def test_sandbox_uploads_dir(self, paths: Paths): + expected = paths.base_dir / "users" / "u1" / "threads" / "t1" / "user-data" / "uploads" + assert paths.sandbox_uploads_dir("t1", user_id="u1") == expected + + def test_sandbox_outputs_dir(self, paths: Paths): + expected = paths.base_dir / "users" / "u1" / "threads" / "t1" / "user-data" / "outputs" + assert paths.sandbox_outputs_dir("t1", user_id="u1") == expected + + def test_sandbox_user_data_dir(self, paths: Paths): + expected = paths.base_dir / "users" / "u1" / "threads" / "t1" / "user-data" + assert paths.sandbox_user_data_dir("t1", user_id="u1") == expected + + def test_acp_workspace_dir(self, paths: Paths): + expected = paths.base_dir / "users" / "u1" / "threads" / "t1" / "acp-workspace" + assert paths.acp_workspace_dir("t1", user_id="u1") == expected + + def test_legacy_sandbox_work_dir(self, paths: Paths): + expected = paths.base_dir / "threads" / "t1" / "user-data" / "workspace" + 
assert paths.sandbox_work_dir("t1") == expected + + +class TestHostPathsWithUserId: + def test_host_thread_dir_with_user_id(self, paths: Paths): + result = paths.host_thread_dir("t1", user_id="u1") + assert "users" in result + assert "u1" in result + assert "threads" in result + assert "t1" in result + + def test_host_thread_dir_legacy(self, paths: Paths): + result = paths.host_thread_dir("t1") + assert "threads" in result + assert "t1" in result + assert "users" not in result + + def test_host_sandbox_user_data_dir_with_user_id(self, paths: Paths): + result = paths.host_sandbox_user_data_dir("t1", user_id="u1") + assert "users" in result + assert "user-data" in result + + def test_host_sandbox_work_dir_with_user_id(self, paths: Paths): + result = paths.host_sandbox_work_dir("t1", user_id="u1") + assert "workspace" in result + + def test_host_sandbox_uploads_dir_with_user_id(self, paths: Paths): + result = paths.host_sandbox_uploads_dir("t1", user_id="u1") + assert "uploads" in result + + def test_host_sandbox_outputs_dir_with_user_id(self, paths: Paths): + result = paths.host_sandbox_outputs_dir("t1", user_id="u1") + assert "outputs" in result + + def test_host_acp_workspace_dir_with_user_id(self, paths: Paths): + result = paths.host_acp_workspace_dir("t1", user_id="u1") + assert "acp-workspace" in result + + +class TestEnsureAndDeleteWithUserId: + def test_ensure_thread_dirs_creates_user_scoped(self, paths: Paths): + paths.ensure_thread_dirs("t1", user_id="u1") + assert paths.sandbox_work_dir("t1", user_id="u1").is_dir() + assert paths.sandbox_uploads_dir("t1", user_id="u1").is_dir() + assert paths.sandbox_outputs_dir("t1", user_id="u1").is_dir() + assert paths.acp_workspace_dir("t1", user_id="u1").is_dir() + + def test_delete_thread_dir_removes_user_scoped(self, paths: Paths): + paths.ensure_thread_dirs("t1", user_id="u1") + assert paths.thread_dir("t1", user_id="u1").exists() + paths.delete_thread_dir("t1", user_id="u1") + assert not paths.thread_dir("t1", user_id="u1").exists() + + def test_delete_thread_dir_idempotent(self, paths: Paths): + paths.delete_thread_dir("nonexistent", user_id="u1") # should not raise + + def test_ensure_thread_dirs_legacy_still_works(self, paths: Paths): + paths.ensure_thread_dirs("t1") + assert paths.sandbox_work_dir("t1").is_dir() + + def test_user_scoped_and_legacy_are_independent(self, paths: Paths): + paths.ensure_thread_dirs("t1", user_id="u1") + paths.ensure_thread_dirs("t1") + # Both exist independently + assert paths.thread_dir("t1", user_id="u1").exists() + assert paths.thread_dir("t1").exists() + # Delete one doesn't affect the other + paths.delete_thread_dir("t1", user_id="u1") + assert not paths.thread_dir("t1", user_id="u1").exists() + assert paths.thread_dir("t1").exists() + + +class TestResolveVirtualPathWithUserId: + def test_resolve_virtual_path_with_user_id(self, paths: Paths): + paths.ensure_thread_dirs("t1", user_id="u1") + result = paths.resolve_virtual_path("t1", "/mnt/user-data/workspace/file.txt", user_id="u1") + expected_base = paths.sandbox_user_data_dir("t1", user_id="u1").resolve() + assert str(result).startswith(str(expected_base)) + + def test_resolve_virtual_path_legacy(self, paths: Paths): + paths.ensure_thread_dirs("t1") + result = paths.resolve_virtual_path("t1", "/mnt/user-data/workspace/file.txt") + expected_base = paths.sandbox_user_data_dir("t1").resolve() + assert str(result).startswith(str(expected_base)) diff --git a/backend/tests/test_persistence_scaffold.py b/backend/tests/test_persistence_scaffold.py new file 
mode 100644 index 000000000..178a08e84 --- /dev/null +++ b/backend/tests/test_persistence_scaffold.py @@ -0,0 +1,233 @@ +"""Tests for the persistence layer scaffolding. + +Tests: +1. DatabaseConfig property derivation (paths, URLs) +2. MemoryRunStore CRUD + user_id filtering +3. Base.to_dict() via inspect mixin +4. Engine init/close lifecycle (memory + SQLite) +5. Postgres missing-dep error message +""" + +from datetime import UTC, datetime + +import pytest + +from deerflow.config.database_config import DatabaseConfig +from deerflow.runtime.runs.store.memory import MemoryRunStore + +# -- DatabaseConfig -- + + +class TestDatabaseConfig: + def test_defaults(self): + c = DatabaseConfig() + assert c.backend == "memory" + assert c.pool_size == 5 + + def test_sqlite_paths_unified(self): + c = DatabaseConfig(backend="sqlite", sqlite_dir="./mydata") + assert c.sqlite_path.endswith("deerflow.db") + assert "mydata" in c.sqlite_path + # Backward-compatible aliases point to the same file + assert c.checkpointer_sqlite_path == c.sqlite_path + assert c.app_sqlite_path == c.sqlite_path + + def test_app_sqlalchemy_url_sqlite(self): + c = DatabaseConfig(backend="sqlite", sqlite_dir="./data") + url = c.app_sqlalchemy_url + assert url.startswith("sqlite+aiosqlite:///") + assert "deerflow.db" in url + + def test_app_sqlalchemy_url_postgres(self): + c = DatabaseConfig( + backend="postgres", + postgres_url="postgresql://u:p@h:5432/db", + ) + url = c.app_sqlalchemy_url + assert url.startswith("postgresql+asyncpg://") + assert "u:p@h:5432/db" in url + + def test_app_sqlalchemy_url_postgres_already_asyncpg(self): + c = DatabaseConfig( + backend="postgres", + postgres_url="postgresql+asyncpg://u:p@h:5432/db", + ) + url = c.app_sqlalchemy_url + assert url.count("asyncpg") == 1 + + def test_memory_has_no_url(self): + c = DatabaseConfig(backend="memory") + with pytest.raises(ValueError, match="No SQLAlchemy URL"): + _ = c.app_sqlalchemy_url + + +# -- MemoryRunStore -- + + +class TestMemoryRunStore: + @pytest.fixture + def store(self): + return MemoryRunStore() + + @pytest.mark.anyio + async def test_put_and_get(self, store): + await store.put("r1", thread_id="t1", status="pending") + row = await store.get("r1") + assert row is not None + assert row["run_id"] == "r1" + assert row["status"] == "pending" + + @pytest.mark.anyio + async def test_get_missing_returns_none(self, store): + assert await store.get("nope") is None + + @pytest.mark.anyio + async def test_update_status(self, store): + await store.put("r1", thread_id="t1") + await store.update_status("r1", "running") + assert (await store.get("r1"))["status"] == "running" + + @pytest.mark.anyio + async def test_update_status_with_error(self, store): + await store.put("r1", thread_id="t1") + await store.update_status("r1", "error", error="boom") + row = await store.get("r1") + assert row["status"] == "error" + assert row["error"] == "boom" + + @pytest.mark.anyio + async def test_list_by_thread(self, store): + await store.put("r1", thread_id="t1") + await store.put("r2", thread_id="t1") + await store.put("r3", thread_id="t2") + rows = await store.list_by_thread("t1") + assert len(rows) == 2 + assert all(r["thread_id"] == "t1" for r in rows) + + @pytest.mark.anyio + async def test_list_by_thread_owner_filter(self, store): + await store.put("r1", thread_id="t1", user_id="alice") + await store.put("r2", thread_id="t1", user_id="bob") + rows = await store.list_by_thread("t1", user_id="alice") + assert len(rows) == 1 + assert rows[0]["user_id"] == "alice" + + 
@pytest.mark.anyio + async def test_owner_none_returns_all(self, store): + await store.put("r1", thread_id="t1", user_id="alice") + await store.put("r2", thread_id="t1", user_id="bob") + rows = await store.list_by_thread("t1", user_id=None) + assert len(rows) == 2 + + @pytest.mark.anyio + async def test_delete(self, store): + await store.put("r1", thread_id="t1") + await store.delete("r1") + assert await store.get("r1") is None + + @pytest.mark.anyio + async def test_delete_nonexistent_is_noop(self, store): + await store.delete("nope") # should not raise + + @pytest.mark.anyio + async def test_list_pending(self, store): + await store.put("r1", thread_id="t1", status="pending") + await store.put("r2", thread_id="t1", status="running") + await store.put("r3", thread_id="t2", status="pending") + pending = await store.list_pending() + assert len(pending) == 2 + assert all(r["status"] == "pending" for r in pending) + + @pytest.mark.anyio + async def test_list_pending_respects_before(self, store): + past = "2020-01-01T00:00:00+00:00" + future = "2099-01-01T00:00:00+00:00" + await store.put("r1", thread_id="t1", status="pending", created_at=past) + await store.put("r2", thread_id="t1", status="pending", created_at=future) + pending = await store.list_pending(before=datetime.now(UTC).isoformat()) + assert len(pending) == 1 + assert pending[0]["run_id"] == "r1" + + @pytest.mark.anyio + async def test_list_pending_fifo_order(self, store): + await store.put("r2", thread_id="t1", status="pending", created_at="2024-01-02T00:00:00+00:00") + await store.put("r1", thread_id="t1", status="pending", created_at="2024-01-01T00:00:00+00:00") + pending = await store.list_pending() + assert pending[0]["run_id"] == "r1" + + +# -- Base.to_dict mixin -- + + +class TestBaseToDictMixin: + @pytest.mark.anyio + async def test_to_dict_and_exclude(self, tmp_path): + """Create a temp SQLite DB with a minimal model, verify to_dict.""" + from sqlalchemy import String + from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine + from sqlalchemy.orm import Mapped, mapped_column + + from deerflow.persistence.base import Base + + class _Tmp(Base): + __tablename__ = "_tmp_test" + id: Mapped[str] = mapped_column(String(64), primary_key=True) + name: Mapped[str] = mapped_column(String(128)) + + engine = create_async_engine(f"sqlite+aiosqlite:///{tmp_path / 'test.db'}") + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + sf = async_sessionmaker(engine, expire_on_commit=False) + async with sf() as session: + session.add(_Tmp(id="1", name="hello")) + await session.commit() + obj = await session.get(_Tmp, "1") + + assert obj.to_dict() == {"id": "1", "name": "hello"} + assert obj.to_dict(exclude={"name"}) == {"id": "1"} + assert "_Tmp" in repr(obj) + + await engine.dispose() + + +# -- Engine lifecycle -- + + +class TestEngineLifecycle: + @pytest.mark.anyio + async def test_memory_is_noop(self): + from deerflow.persistence.engine import close_engine, get_session_factory, init_engine + + await init_engine("memory") + assert get_session_factory() is None + await close_engine() + + @pytest.mark.anyio + async def test_sqlite_creates_engine(self, tmp_path): + from deerflow.persistence.engine import close_engine, get_session_factory, init_engine + + url = f"sqlite+aiosqlite:///{tmp_path / 'test.db'}" + await init_engine("sqlite", url=url, sqlite_dir=str(tmp_path)) + sf = get_session_factory() + assert sf is not None + async with sf() as session: + assert session is not None + await 
close_engine() + assert get_session_factory() is None + + @pytest.mark.anyio + async def test_postgres_without_asyncpg_gives_actionable_error(self): + """If asyncpg is not installed, error message tells user what to do.""" + from deerflow.persistence.engine import init_engine + + try: + import asyncpg # noqa: F401 + + pytest.skip("asyncpg is installed -- cannot test missing-dep path") + except ImportError: + # asyncpg is not installed — this is the expected state for this test. + # We proceed to verify that init_engine raises an actionable ImportError. + pass # noqa: S110 — intentionally ignored + with pytest.raises(ImportError, match="uv sync --extra postgres"): + await init_engine("postgres", url="postgresql+asyncpg://x:x@localhost/x") diff --git a/backend/tests/test_present_file_tool_core_logic.py b/backend/tests/test_present_file_tool_core_logic.py index df5c78229..0c064b56b 100644 --- a/backend/tests/test_present_file_tool_core_logic.py +++ b/backend/tests/test_present_file_tool_core_logic.py @@ -39,7 +39,7 @@ def test_present_files_keeps_virtual_outputs_path(tmp_path, monkeypatch): monkeypatch.setattr( present_file_tool_module, "get_paths", - lambda: SimpleNamespace(resolve_virtual_path=lambda thread_id, path: artifact_path), + lambda: SimpleNamespace(resolve_virtual_path=lambda thread_id, path, *, user_id=None: artifact_path), ) result = present_file_tool_module.present_file_tool.func( diff --git a/backend/tests/test_run_event_store.py b/backend/tests/test_run_event_store.py new file mode 100644 index 000000000..2b22b2c6f --- /dev/null +++ b/backend/tests/test_run_event_store.py @@ -0,0 +1,500 @@ +"""Tests for RunEventStore contract across all backends. + +Uses a helper to create the store for each backend type. +Memory tests run directly; DB and JSONL tests create stores inside each test. 
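+
+The contract these tests pin down, in brief (not a full API sketch; only the
+calls exercised below)::
+
+    record = await store.put(thread_id=..., run_id=..., event_type=..., category=..., content=...)
+    record["seq"]  # 1-based and strictly increasing per thread
+    await store.list_messages(thread_id, before_seq=..., after_seq=..., limit=...)
+    await store.list_events(thread_id, run_id, event_types=[...])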
+""" + +import pytest + +from deerflow.runtime.events.store.memory import MemoryRunEventStore + + +@pytest.fixture +def store(): + return MemoryRunEventStore() + + +# -- Basic write and query -- + + +class TestPutAndSeq: + @pytest.mark.anyio + async def test_put_returns_dict_with_seq(self, store): + record = await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message", content="hello") + assert "seq" in record + assert record["seq"] == 1 + assert record["thread_id"] == "t1" + assert record["run_id"] == "r1" + assert record["event_type"] == "human_message" + assert record["category"] == "message" + assert record["content"] == "hello" + assert "created_at" in record + + @pytest.mark.anyio + async def test_seq_strictly_increasing_same_thread(self, store): + r1 = await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + r2 = await store.put(thread_id="t1", run_id="r1", event_type="ai_message", category="message") + r3 = await store.put(thread_id="t1", run_id="r1", event_type="llm_end", category="trace") + assert r1["seq"] == 1 + assert r2["seq"] == 2 + assert r3["seq"] == 3 + + @pytest.mark.anyio + async def test_seq_independent_across_threads(self, store): + r1 = await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + r2 = await store.put(thread_id="t2", run_id="r2", event_type="human_message", category="message") + assert r1["seq"] == 1 + assert r2["seq"] == 1 + + @pytest.mark.anyio + async def test_put_respects_provided_created_at(self, store): + ts = "2024-06-01T12:00:00+00:00" + record = await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message", created_at=ts) + assert record["created_at"] == ts + + @pytest.mark.anyio + async def test_put_metadata_preserved(self, store): + meta = {"model": "gpt-4", "tokens": 100} + record = await store.put(thread_id="t1", run_id="r1", event_type="llm_end", category="trace", metadata=meta) + assert record["metadata"] == meta + + +# -- list_messages -- + + +class TestListMessages: + @pytest.mark.anyio + async def test_only_returns_message_category(self, store): + await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + await store.put(thread_id="t1", run_id="r1", event_type="llm_end", category="trace") + await store.put(thread_id="t1", run_id="r1", event_type="run_start", category="lifecycle") + messages = await store.list_messages("t1") + assert len(messages) == 1 + assert messages[0]["category"] == "message" + + @pytest.mark.anyio + async def test_ascending_seq_order(self, store): + await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message", content="first") + await store.put(thread_id="t1", run_id="r1", event_type="ai_message", category="message", content="second") + await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message", content="third") + messages = await store.list_messages("t1") + seqs = [m["seq"] for m in messages] + assert seqs == sorted(seqs) + + @pytest.mark.anyio + async def test_before_seq_pagination(self, store): + for i in range(10): + await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message", content=str(i)) + messages = await store.list_messages("t1", before_seq=6, limit=3) + assert len(messages) == 3 + assert [m["seq"] for m in messages] == [3, 4, 5] + + @pytest.mark.anyio + async def test_after_seq_pagination(self, store): + for i in range(10): + await 
store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message", content=str(i)) + messages = await store.list_messages("t1", after_seq=7, limit=3) + assert len(messages) == 3 + assert [m["seq"] for m in messages] == [8, 9, 10] + + @pytest.mark.anyio + async def test_limit_restricts_count(self, store): + for _ in range(20): + await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + messages = await store.list_messages("t1", limit=5) + assert len(messages) == 5 + + @pytest.mark.anyio + async def test_cross_run_unified_ordering(self, store): + await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + await store.put(thread_id="t1", run_id="r1", event_type="ai_message", category="message") + await store.put(thread_id="t1", run_id="r2", event_type="human_message", category="message") + await store.put(thread_id="t1", run_id="r2", event_type="ai_message", category="message") + messages = await store.list_messages("t1") + assert [m["seq"] for m in messages] == [1, 2, 3, 4] + assert messages[0]["run_id"] == "r1" + assert messages[2]["run_id"] == "r2" + + @pytest.mark.anyio + async def test_default_returns_latest(self, store): + for _ in range(10): + await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + messages = await store.list_messages("t1", limit=3) + assert [m["seq"] for m in messages] == [8, 9, 10] + + +# -- list_events -- + + +class TestListEvents: + @pytest.mark.anyio + async def test_returns_all_categories_for_run(self, store): + await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + await store.put(thread_id="t1", run_id="r1", event_type="llm_end", category="trace") + await store.put(thread_id="t1", run_id="r1", event_type="run_start", category="lifecycle") + events = await store.list_events("t1", "r1") + assert len(events) == 3 + + @pytest.mark.anyio + async def test_event_types_filter(self, store): + await store.put(thread_id="t1", run_id="r1", event_type="llm_start", category="trace") + await store.put(thread_id="t1", run_id="r1", event_type="llm_end", category="trace") + await store.put(thread_id="t1", run_id="r1", event_type="tool_start", category="trace") + events = await store.list_events("t1", "r1", event_types=["llm_end"]) + assert len(events) == 1 + assert events[0]["event_type"] == "llm_end" + + @pytest.mark.anyio + async def test_only_returns_specified_run(self, store): + await store.put(thread_id="t1", run_id="r1", event_type="llm_end", category="trace") + await store.put(thread_id="t1", run_id="r2", event_type="llm_end", category="trace") + events = await store.list_events("t1", "r1") + assert len(events) == 1 + assert events[0]["run_id"] == "r1" + + +# -- list_messages_by_run -- + + +class TestListMessagesByRun: + @pytest.mark.anyio + async def test_only_messages_for_specified_run(self, store): + await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + await store.put(thread_id="t1", run_id="r1", event_type="llm_end", category="trace") + await store.put(thread_id="t1", run_id="r2", event_type="human_message", category="message") + messages = await store.list_messages_by_run("t1", "r1") + assert len(messages) == 1 + assert messages[0]["run_id"] == "r1" + assert messages[0]["category"] == "message" + + +# -- count_messages -- + + +class TestCountMessages: + @pytest.mark.anyio + async def test_counts_only_message_category(self, store): + await 
store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + await store.put(thread_id="t1", run_id="r1", event_type="ai_message", category="message") + await store.put(thread_id="t1", run_id="r1", event_type="llm_end", category="trace") + assert await store.count_messages("t1") == 2 + + +# -- put_batch -- + + +class TestPutBatch: + @pytest.mark.anyio + async def test_batch_assigns_seq(self, store): + events = [ + {"thread_id": "t1", "run_id": "r1", "event_type": "human_message", "category": "message", "content": "a"}, + {"thread_id": "t1", "run_id": "r1", "event_type": "ai_message", "category": "message", "content": "b"}, + {"thread_id": "t1", "run_id": "r1", "event_type": "llm_end", "category": "trace"}, + ] + results = await store.put_batch(events) + assert len(results) == 3 + assert all("seq" in r for r in results) + + @pytest.mark.anyio + async def test_batch_seq_strictly_increasing(self, store): + events = [ + {"thread_id": "t1", "run_id": "r1", "event_type": "human_message", "category": "message"}, + {"thread_id": "t1", "run_id": "r1", "event_type": "ai_message", "category": "message"}, + ] + results = await store.put_batch(events) + assert results[0]["seq"] == 1 + assert results[1]["seq"] == 2 + + +# -- delete -- + + +class TestDelete: + @pytest.mark.anyio + async def test_delete_by_thread(self, store): + await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + await store.put(thread_id="t1", run_id="r1", event_type="ai_message", category="message") + await store.put(thread_id="t1", run_id="r2", event_type="llm_end", category="trace") + count = await store.delete_by_thread("t1") + assert count == 3 + assert await store.list_messages("t1") == [] + assert await store.count_messages("t1") == 0 + + @pytest.mark.anyio + async def test_delete_by_run(self, store): + await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + await store.put(thread_id="t1", run_id="r2", event_type="human_message", category="message") + await store.put(thread_id="t1", run_id="r2", event_type="llm_end", category="trace") + count = await store.delete_by_run("t1", "r2") + assert count == 2 + messages = await store.list_messages("t1") + assert len(messages) == 1 + assert messages[0]["run_id"] == "r1" + + @pytest.mark.anyio + async def test_delete_nonexistent_thread_returns_zero(self, store): + assert await store.delete_by_thread("nope") == 0 + + @pytest.mark.anyio + async def test_delete_nonexistent_run_returns_zero(self, store): + await store.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + assert await store.delete_by_run("t1", "nope") == 0 + + @pytest.mark.anyio + async def test_delete_nonexistent_thread_for_run_returns_zero(self, store): + assert await store.delete_by_run("nope", "r1") == 0 + + +# -- Edge cases -- + + +class TestEdgeCases: + @pytest.mark.anyio + async def test_empty_thread_list_messages(self, store): + assert await store.list_messages("empty") == [] + + @pytest.mark.anyio + async def test_empty_run_list_events(self, store): + assert await store.list_events("empty", "r1") == [] + + @pytest.mark.anyio + async def test_empty_thread_count_messages(self, store): + assert await store.count_messages("empty") == 0 + + +# -- DB-specific tests -- + + +class TestDbRunEventStore: + """Tests for DbRunEventStore with temp SQLite.""" + + @pytest.mark.anyio + async def test_basic_crud(self, tmp_path): + from deerflow.persistence.engine import close_engine, 
get_session_factory, init_engine + from deerflow.runtime.events.store.db import DbRunEventStore + + url = f"sqlite+aiosqlite:///{tmp_path / 'test.db'}" + await init_engine("sqlite", url=url, sqlite_dir=str(tmp_path)) + s = DbRunEventStore(get_session_factory()) + + r = await s.put(thread_id="t1", run_id="r1", event_type="human_message", category="message", content="hi") + assert r["seq"] == 1 + r2 = await s.put(thread_id="t1", run_id="r1", event_type="ai_message", category="message", content="hello") + assert r2["seq"] == 2 + + messages = await s.list_messages("t1") + assert len(messages) == 2 + + count = await s.count_messages("t1") + assert count == 2 + + await close_engine() + + @pytest.mark.anyio + async def test_trace_content_truncation(self, tmp_path): + from deerflow.persistence.engine import close_engine, get_session_factory, init_engine + from deerflow.runtime.events.store.db import DbRunEventStore + + url = f"sqlite+aiosqlite:///{tmp_path / 'test.db'}" + await init_engine("sqlite", url=url, sqlite_dir=str(tmp_path)) + s = DbRunEventStore(get_session_factory(), max_trace_content=100) + + long = "x" * 200 + r = await s.put(thread_id="t1", run_id="r1", event_type="llm_end", category="trace", content=long) + assert len(r["content"]) == 100 + assert r["metadata"].get("content_truncated") is True + + # message content NOT truncated + m = await s.put(thread_id="t1", run_id="r1", event_type="ai_message", category="message", content=long) + assert len(m["content"]) == 200 + + await close_engine() + + @pytest.mark.anyio + async def test_pagination(self, tmp_path): + from deerflow.persistence.engine import close_engine, get_session_factory, init_engine + from deerflow.runtime.events.store.db import DbRunEventStore + + url = f"sqlite+aiosqlite:///{tmp_path / 'test.db'}" + await init_engine("sqlite", url=url, sqlite_dir=str(tmp_path)) + s = DbRunEventStore(get_session_factory()) + + for i in range(10): + await s.put(thread_id="t1", run_id="r1", event_type="human_message", category="message", content=str(i)) + + # before_seq + msgs = await s.list_messages("t1", before_seq=6, limit=3) + assert [m["seq"] for m in msgs] == [3, 4, 5] + + # after_seq + msgs = await s.list_messages("t1", after_seq=7, limit=3) + assert [m["seq"] for m in msgs] == [8, 9, 10] + + # default (latest) + msgs = await s.list_messages("t1", limit=3) + assert [m["seq"] for m in msgs] == [8, 9, 10] + + await close_engine() + + @pytest.mark.anyio + async def test_delete(self, tmp_path): + from deerflow.persistence.engine import close_engine, get_session_factory, init_engine + from deerflow.runtime.events.store.db import DbRunEventStore + + url = f"sqlite+aiosqlite:///{tmp_path / 'test.db'}" + await init_engine("sqlite", url=url, sqlite_dir=str(tmp_path)) + s = DbRunEventStore(get_session_factory()) + + await s.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + await s.put(thread_id="t1", run_id="r2", event_type="ai_message", category="message") + c = await s.delete_by_run("t1", "r2") + assert c == 1 + assert await s.count_messages("t1") == 1 + + c = await s.delete_by_thread("t1") + assert c == 1 + assert await s.count_messages("t1") == 0 + + await close_engine() + + @pytest.mark.anyio + async def test_put_batch_seq_continuity(self, tmp_path): + """Batch write produces continuous seq values with no gaps.""" + from deerflow.persistence.engine import close_engine, get_session_factory, init_engine + from deerflow.runtime.events.store.db import DbRunEventStore + + url = f"sqlite+aiosqlite:///{tmp_path 
/ 'test.db'}" + await init_engine("sqlite", url=url, sqlite_dir=str(tmp_path)) + s = DbRunEventStore(get_session_factory()) + + events = [{"thread_id": "t1", "run_id": "r1", "event_type": "trace", "category": "trace"} for _ in range(50)] + results = await s.put_batch(events) + seqs = [r["seq"] for r in results] + assert seqs == list(range(1, 51)) + await close_engine() + + +# -- Factory tests -- + + +class TestMakeRunEventStore: + """Tests for the make_run_event_store factory function.""" + + @pytest.mark.anyio + async def test_memory_backend_default(self): + from deerflow.runtime.events.store import make_run_event_store + + store = make_run_event_store(None) + assert type(store).__name__ == "MemoryRunEventStore" + + @pytest.mark.anyio + async def test_memory_backend_explicit(self): + from unittest.mock import MagicMock + + from deerflow.runtime.events.store import make_run_event_store + + config = MagicMock() + config.backend = "memory" + store = make_run_event_store(config) + assert type(store).__name__ == "MemoryRunEventStore" + + @pytest.mark.anyio + async def test_db_backend_with_engine(self, tmp_path): + from unittest.mock import MagicMock + + from deerflow.persistence.engine import close_engine, init_engine + from deerflow.runtime.events.store import make_run_event_store + + url = f"sqlite+aiosqlite:///{tmp_path / 'test.db'}" + await init_engine("sqlite", url=url, sqlite_dir=str(tmp_path)) + + config = MagicMock() + config.backend = "db" + config.max_trace_content = 10240 + store = make_run_event_store(config) + assert type(store).__name__ == "DbRunEventStore" + await close_engine() + + @pytest.mark.anyio + async def test_db_backend_no_engine_falls_back(self): + """db backend without engine falls back to memory.""" + from unittest.mock import MagicMock + + from deerflow.persistence.engine import close_engine, init_engine + from deerflow.runtime.events.store import make_run_event_store + + await init_engine("memory") # no engine created + + config = MagicMock() + config.backend = "db" + store = make_run_event_store(config) + assert type(store).__name__ == "MemoryRunEventStore" + await close_engine() + + @pytest.mark.anyio + async def test_jsonl_backend(self): + from unittest.mock import MagicMock + + from deerflow.runtime.events.store import make_run_event_store + + config = MagicMock() + config.backend = "jsonl" + store = make_run_event_store(config) + assert type(store).__name__ == "JsonlRunEventStore" + + @pytest.mark.anyio + async def test_unknown_backend_raises(self): + from unittest.mock import MagicMock + + from deerflow.runtime.events.store import make_run_event_store + + config = MagicMock() + config.backend = "redis" + with pytest.raises(ValueError, match="Unknown"): + make_run_event_store(config) + + +# -- JSONL-specific tests -- + + +class TestJsonlRunEventStore: + @pytest.mark.anyio + async def test_basic_crud(self, tmp_path): + from deerflow.runtime.events.store.jsonl import JsonlRunEventStore + + s = JsonlRunEventStore(base_dir=tmp_path / "jsonl") + r = await s.put(thread_id="t1", run_id="r1", event_type="human_message", category="message", content="hi") + assert r["seq"] == 1 + messages = await s.list_messages("t1") + assert len(messages) == 1 + + @pytest.mark.anyio + async def test_file_at_correct_path(self, tmp_path): + from deerflow.runtime.events.store.jsonl import JsonlRunEventStore + + s = JsonlRunEventStore(base_dir=tmp_path / "jsonl") + await s.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + assert (tmp_path / "jsonl" / 
"threads" / "t1" / "runs" / "r1.jsonl").exists() + + @pytest.mark.anyio + async def test_cross_run_messages(self, tmp_path): + from deerflow.runtime.events.store.jsonl import JsonlRunEventStore + + s = JsonlRunEventStore(base_dir=tmp_path / "jsonl") + await s.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + await s.put(thread_id="t1", run_id="r2", event_type="human_message", category="message") + messages = await s.list_messages("t1") + assert len(messages) == 2 + assert [m["seq"] for m in messages] == [1, 2] + + @pytest.mark.anyio + async def test_delete_by_run(self, tmp_path): + from deerflow.runtime.events.store.jsonl import JsonlRunEventStore + + s = JsonlRunEventStore(base_dir=tmp_path / "jsonl") + await s.put(thread_id="t1", run_id="r1", event_type="human_message", category="message") + await s.put(thread_id="t1", run_id="r2", event_type="human_message", category="message") + c = await s.delete_by_run("t1", "r2") + assert c == 1 + assert not (tmp_path / "jsonl" / "threads" / "t1" / "runs" / "r2.jsonl").exists() + assert await s.count_messages("t1") == 1 diff --git a/backend/tests/test_run_event_store_pagination.py b/backend/tests/test_run_event_store_pagination.py new file mode 100644 index 000000000..14a09610c --- /dev/null +++ b/backend/tests/test_run_event_store_pagination.py @@ -0,0 +1,125 @@ +"""Tests for paginated list_messages_by_run across all RunEventStore backends.""" + +import pytest + +from deerflow.runtime.events.store.memory import MemoryRunEventStore + + +@pytest.fixture +def base_store(): + return MemoryRunEventStore() + + +@pytest.mark.anyio +async def test_list_messages_by_run_default_returns_all(base_store): + store = base_store + for i in range(7): + await store.put( + thread_id="t1", + run_id="run-a", + event_type="human_message" if i % 2 == 0 else "ai_message", + category="message", + content=f"msg-a-{i}", + ) + for i in range(3): + await store.put( + thread_id="t1", + run_id="run-b", + event_type="human_message", + category="message", + content=f"msg-b-{i}", + ) + await store.put(thread_id="t1", run_id="run-a", event_type="tool_call", category="trace", content="trace") + + msgs = await store.list_messages_by_run("t1", "run-a") + assert len(msgs) == 7 + assert all(m["category"] == "message" for m in msgs) + assert all(m["run_id"] == "run-a" for m in msgs) + + +@pytest.mark.anyio +async def test_list_messages_by_run_with_limit(base_store): + store = base_store + for i in range(7): + await store.put( + thread_id="t1", + run_id="run-a", + event_type="human_message" if i % 2 == 0 else "ai_message", + category="message", + content=f"msg-a-{i}", + ) + + msgs = await store.list_messages_by_run("t1", "run-a", limit=3) + assert len(msgs) == 3 + seqs = [m["seq"] for m in msgs] + assert seqs == sorted(seqs) + + +@pytest.mark.anyio +async def test_list_messages_by_run_after_seq(base_store): + store = base_store + for i in range(7): + await store.put( + thread_id="t1", + run_id="run-a", + event_type="human_message" if i % 2 == 0 else "ai_message", + category="message", + content=f"msg-a-{i}", + ) + + all_msgs = await store.list_messages_by_run("t1", "run-a") + cursor_seq = all_msgs[2]["seq"] + msgs = await store.list_messages_by_run("t1", "run-a", after_seq=cursor_seq, limit=50) + assert all(m["seq"] > cursor_seq for m in msgs) + assert len(msgs) == 4 + + +@pytest.mark.anyio +async def test_list_messages_by_run_before_seq(base_store): + store = base_store + for i in range(7): + await store.put( + thread_id="t1", + run_id="run-a", + 
event_type="human_message" if i % 2 == 0 else "ai_message", + category="message", + content=f"msg-a-{i}", + ) + + all_msgs = await store.list_messages_by_run("t1", "run-a") + cursor_seq = all_msgs[4]["seq"] + msgs = await store.list_messages_by_run("t1", "run-a", before_seq=cursor_seq, limit=50) + assert all(m["seq"] < cursor_seq for m in msgs) + assert len(msgs) == 4 + + +@pytest.mark.anyio +async def test_list_messages_by_run_does_not_include_other_run(base_store): + store = base_store + for i in range(7): + await store.put( + thread_id="t1", + run_id="run-a", + event_type="human_message", + category="message", + content=f"msg-a-{i}", + ) + for i in range(3): + await store.put( + thread_id="t1", + run_id="run-b", + event_type="human_message", + category="message", + content=f"msg-b-{i}", + ) + + msgs = await store.list_messages_by_run("t1", "run-b") + assert len(msgs) == 3 + assert all(m["run_id"] == "run-b" for m in msgs) + + +@pytest.mark.anyio +async def test_list_messages_by_run_empty_run(base_store): + store = base_store + msgs = await store.list_messages_by_run("t1", "nonexistent") + assert msgs == [] diff --git a/backend/tests/test_run_journal.py b/backend/tests/test_run_journal.py new file mode 100644 index 000000000..2188eeef0 --- /dev/null +++ b/backend/tests/test_run_journal.py @@ -0,0 +1,442 @@ +"""Tests for RunJournal callback handler. + +Uses MemoryRunEventStore as the backend for direct event inspection. +""" + +import asyncio +from unittest.mock import MagicMock +from uuid import uuid4 + +import pytest + +from deerflow.runtime.events.store.memory import MemoryRunEventStore +from deerflow.runtime.journal import RunJournal + + +@pytest.fixture +def journal_setup(): + store = MemoryRunEventStore() + j = RunJournal("r1", "t1", store, flush_threshold=100) + return j, store + + +def _make_llm_response(content="Hello", usage=None, tool_calls=None, additional_kwargs=None): + """Create a mock LLM response with a message. + + model_dump() returns checkpoint-aligned format matching real AIMessage. 
+ """ + msg = MagicMock() + msg.type = "ai" + msg.content = content + msg.id = f"msg-{id(msg)}" + msg.tool_calls = tool_calls or [] + msg.invalid_tool_calls = [] + msg.response_metadata = {"model_name": "test-model"} + msg.usage_metadata = usage + msg.additional_kwargs = additional_kwargs or {} + msg.name = None + # model_dump returns checkpoint-aligned format + msg.model_dump.return_value = { + "content": content, + "additional_kwargs": additional_kwargs or {}, + "response_metadata": {"model_name": "test-model"}, + "type": "ai", + "name": None, + "id": msg.id, + "tool_calls": tool_calls or [], + "invalid_tool_calls": [], + "usage_metadata": usage, + } + + gen = MagicMock() + gen.message = msg + + response = MagicMock() + response.generations = [[gen]] + return response + + +class TestLlmCallbacks: + @pytest.mark.anyio + async def test_on_llm_end_produces_trace_event(self, journal_setup): + j, store = journal_setup + run_id = uuid4() + j.on_llm_start({}, [], run_id=run_id, tags=["lead_agent"]) + j.on_llm_end(_make_llm_response("Hi"), run_id=run_id, parent_run_id=None, tags=["lead_agent"]) + await j.flush() + events = await store.list_events("t1", "r1") + trace_events = [e for e in events if e["event_type"] == "llm.ai.response"] + assert len(trace_events) == 1 + assert trace_events[0]["category"] == "message" + + @pytest.mark.anyio + async def test_on_llm_end_lead_agent_produces_ai_message(self, journal_setup): + j, store = journal_setup + run_id = uuid4() + j.on_llm_start({}, [], run_id=run_id, tags=["lead_agent"]) + j.on_llm_end(_make_llm_response("Answer"), run_id=run_id, parent_run_id=None, tags=["lead_agent"]) + await j.flush() + messages = await store.list_messages("t1") + assert len(messages) == 1 + assert messages[0]["event_type"] == "llm.ai.response" + # Content is checkpoint-aligned model_dump format + assert messages[0]["content"]["type"] == "ai" + assert messages[0]["content"]["content"] == "Answer" + + @pytest.mark.anyio + async def test_on_llm_end_with_tool_calls_produces_ai_tool_call(self, journal_setup): + """LLM response with pending tool_calls emits llm.ai.response with tool_calls in content.""" + j, store = journal_setup + run_id = uuid4() + j.on_llm_end( + _make_llm_response("Let me search", tool_calls=[{"id": "call_1", "name": "search", "args": {}}]), + run_id=run_id, + parent_run_id=None, + tags=["lead_agent"], + ) + await j.flush() + messages = await store.list_messages("t1") + assert len(messages) == 1 + assert messages[0]["event_type"] == "llm.ai.response" + assert len(messages[0]["content"]["tool_calls"]) == 1 + + @pytest.mark.anyio + async def test_on_llm_end_subagent_no_ai_message(self, journal_setup): + j, store = journal_setup + run_id = uuid4() + j.on_llm_start({}, [], run_id=run_id, tags=["subagent:research"]) + j.on_llm_end(_make_llm_response("Sub answer"), run_id=run_id, parent_run_id=None, tags=["subagent:research"]) + await j.flush() + messages = await store.list_messages("t1") + # subagent responses still emit llm.ai.response with category="message" + assert len(messages) == 1 + + @pytest.mark.anyio + async def test_token_accumulation(self, journal_setup): + j, store = journal_setup + usage1 = {"input_tokens": 10, "output_tokens": 5, "total_tokens": 15} + usage2 = {"input_tokens": 20, "output_tokens": 10, "total_tokens": 30} + j.on_llm_end(_make_llm_response("A", usage=usage1), run_id=uuid4(), parent_run_id=None, tags=["lead_agent"]) + j.on_llm_end(_make_llm_response("B", usage=usage2), run_id=uuid4(), parent_run_id=None, tags=["lead_agent"]) + assert 
j._total_input_tokens == 30 + assert j._total_output_tokens == 15 + assert j._total_tokens == 45 + assert j._llm_call_count == 2 + + @pytest.mark.anyio + async def test_total_tokens_computed_from_input_output(self, journal_setup): + """If total_tokens is 0, it should be computed from input + output.""" + j, store = journal_setup + j.on_llm_end( + _make_llm_response("Hi", usage={"input_tokens": 100, "output_tokens": 50, "total_tokens": 0}), + run_id=uuid4(), + parent_run_id=None, + tags=["lead_agent"], + ) + assert j._total_tokens == 150 + + @pytest.mark.anyio + async def test_caller_token_classification(self, journal_setup): + j, store = journal_setup + usage = {"input_tokens": 10, "output_tokens": 5, "total_tokens": 15} + j.on_llm_end(_make_llm_response("A", usage=usage), run_id=uuid4(), parent_run_id=None, tags=["lead_agent"]) + j.on_llm_end(_make_llm_response("B", usage=usage), run_id=uuid4(), parent_run_id=None, tags=["subagent:research"]) + j.on_llm_end(_make_llm_response("C", usage=usage), run_id=uuid4(), parent_run_id=None, tags=["middleware:summarization"]) + # token tracking not broken by caller type + assert j._total_tokens == 45 + assert j._llm_call_count == 3 + + @pytest.mark.anyio + async def test_usage_metadata_none_no_crash(self, journal_setup): + j, store = journal_setup + j.on_llm_end(_make_llm_response("No usage", usage=None), run_id=uuid4(), parent_run_id=None, tags=["lead_agent"]) + await j.flush() + + @pytest.mark.anyio + async def test_latency_tracking(self, journal_setup): + j, store = journal_setup + run_id = uuid4() + j.on_llm_start({}, [], run_id=run_id, tags=["lead_agent"]) + j.on_llm_end(_make_llm_response("Fast"), run_id=run_id, parent_run_id=None, tags=["lead_agent"]) + await j.flush() + events = await store.list_events("t1", "r1") + llm_resp = [e for e in events if e["event_type"] == "llm.ai.response"][0] + assert "latency_ms" in llm_resp["metadata"] + assert llm_resp["metadata"]["latency_ms"] is not None + + +class TestLifecycleCallbacks: + @pytest.mark.anyio + async def test_chain_start_end_produce_trace_events(self, journal_setup): + j, store = journal_setup + j.on_chain_start({}, {}, run_id=uuid4(), parent_run_id=None) + j.on_chain_end({}, run_id=uuid4()) + await asyncio.sleep(0.05) + await j.flush() + events = await store.list_events("t1", "r1") + types = {e["event_type"] for e in events} + assert "run.start" in types + assert "run.end" in types + + @pytest.mark.anyio + async def test_nested_chain_no_run_start(self, journal_setup): + """Nested chains (parent_run_id set) should NOT produce run.start.""" + j, store = journal_setup + parent_id = uuid4() + j.on_chain_start({}, {}, run_id=uuid4(), parent_run_id=parent_id) + j.on_chain_end({}, run_id=uuid4()) + await j.flush() + events = await store.list_events("t1", "r1") + assert not any(e["event_type"] == "run.start" for e in events) + + +class TestToolCallbacks: + @pytest.mark.anyio + async def test_tool_end_with_tool_message(self, journal_setup): + """on_tool_end with a ToolMessage stores it as llm.tool.result.""" + from langchain_core.messages import ToolMessage + + j, store = journal_setup + tool_msg = ToolMessage(content="results", tool_call_id="call_1", name="web_search") + j.on_tool_end(tool_msg, run_id=uuid4()) + await j.flush() + messages = await store.list_messages("t1") + assert len(messages) == 1 + assert messages[0]["event_type"] == "llm.tool.result" + assert messages[0]["content"]["type"] == "tool" + + @pytest.mark.anyio + async def test_tool_end_with_command_unwraps_tool_message(self, 
journal_setup): + """on_tool_end with Command(update={'messages':[ToolMessage]}) unwraps inner message.""" + from langchain_core.messages import ToolMessage + from langgraph.types import Command + + j, store = journal_setup + inner = ToolMessage(content="file list", tool_call_id="call_2", name="present_files") + cmd = Command(update={"messages": [inner]}) + j.on_tool_end(cmd, run_id=uuid4()) + await j.flush() + messages = await store.list_messages("t1") + assert len(messages) == 1 + assert messages[0]["event_type"] == "llm.tool.result" + assert messages[0]["content"]["content"] == "file list" + + @pytest.mark.anyio + async def test_on_tool_error_no_crash(self, journal_setup): + """on_tool_error should not crash (no event emitted by default).""" + j, store = journal_setup + j.on_tool_error(TimeoutError("timeout"), run_id=uuid4(), name="web_fetch") + await j.flush() + # Base implementation does not emit tool_error — just verify no crash + events = await store.list_events("t1", "r1") + assert isinstance(events, list) + + +class TestCustomEvents: + @pytest.mark.anyio + async def test_on_custom_event_not_implemented(self, journal_setup): + """RunJournal does not implement on_custom_event — no crash expected.""" + j, store = journal_setup + # BaseCallbackHandler.on_custom_event is a no-op by default + j.on_custom_event("task_running", {"task_id": "t1"}, run_id=uuid4()) + await j.flush() + events = await store.list_events("t1", "r1") + assert isinstance(events, list) + + +class TestBufferFlush: + @pytest.mark.anyio + async def test_flush_threshold(self, journal_setup): + j, store = journal_setup + j._flush_threshold = 2 + # Each on_llm_end emits 1 event + j.on_llm_end(_make_llm_response("A"), run_id=uuid4(), parent_run_id=None, tags=["lead_agent"]) + assert len(j._buffer) == 1 + j.on_llm_end(_make_llm_response("B"), run_id=uuid4(), parent_run_id=None, tags=["lead_agent"]) + # At threshold the buffer should have been flushed asynchronously + await asyncio.sleep(0.1) + events = await store.list_events("t1", "r1") + assert len(events) >= 2 + + @pytest.mark.anyio + async def test_events_retained_when_no_loop(self, journal_setup): + """Events buffered in a sync (no-loop) context should survive + until the async flush() in the finally block.""" + j, store = journal_setup + j._flush_threshold = 1 + + original = asyncio.get_running_loop + + def no_loop(): + raise RuntimeError("no running event loop") + + asyncio.get_running_loop = no_loop + try: + j._put(event_type="llm.ai.response", category="message", content="test") + finally: + asyncio.get_running_loop = original + + assert len(j._buffer) == 1 + await j.flush() + events = await store.list_events("t1", "r1") + assert any(e["event_type"] == "llm.ai.response" for e in events) + + +class TestIdentifyCaller: + def test_lead_agent_tag(self, journal_setup): + j, _ = journal_setup + assert j._identify_caller(["lead_agent"]) == "lead_agent" + + def test_subagent_tag(self, journal_setup): + j, _ = journal_setup + assert j._identify_caller(["subagent:research"]) == "subagent:research" + + def test_middleware_tag(self, journal_setup): + j, _ = journal_setup + assert j._identify_caller(["middleware:summarization"]) == "middleware:summarization" + + def test_no_tags_returns_lead_agent(self, journal_setup): + j, _ = journal_setup + assert j._identify_caller([]) == "lead_agent" + assert j._identify_caller(None) == "lead_agent" + + +class TestChainErrorCallback: + @pytest.mark.anyio + async def test_on_chain_error_writes_run_error(self, journal_setup): + j, store = 
journal_setup + j.on_chain_error(ValueError("boom"), run_id=uuid4()) + await asyncio.sleep(0.05) + await j.flush() + events = await store.list_events("t1", "r1") + error_events = [e for e in events if e["event_type"] == "run.error"] + assert len(error_events) == 1 + assert "boom" in error_events[0]["content"] + assert error_events[0]["metadata"]["error_type"] == "ValueError" + + +class TestTokenTrackingDisabled: + @pytest.mark.anyio + async def test_track_token_usage_false(self): + store = MemoryRunEventStore() + j = RunJournal("r1", "t1", store, track_token_usage=False, flush_threshold=100) + j.on_llm_end( + _make_llm_response("X", usage={"input_tokens": 50, "output_tokens": 50, "total_tokens": 100}), + run_id=uuid4(), + parent_run_id=None, + tags=["lead_agent"], + ) + data = j.get_completion_data() + assert data["total_tokens"] == 0 + assert data["llm_call_count"] == 0 + + +class TestConvenienceFields: + @pytest.mark.anyio + async def test_first_human_message_via_set(self, journal_setup): + j, _ = journal_setup + j.set_first_human_message("What is AI?") + data = j.get_completion_data() + assert data["first_human_message"] == "What is AI?" + + @pytest.mark.anyio + async def test_get_completion_data(self, journal_setup): + j, _ = journal_setup + j._total_tokens = 100 + j._msg_count = 5 + data = j.get_completion_data() + assert data["total_tokens"] == 100 + assert data["message_count"] == 5 + + +class TestMiddlewareEvents: + @pytest.mark.anyio + async def test_record_middleware_uses_middleware_category(self, journal_setup): + j, store = journal_setup + j.record_middleware( + "title", + name="TitleMiddleware", + hook="after_model", + action="generate_title", + changes={"title": "Test Title", "thread_id": "t1"}, + ) + await j.flush() + events = await store.list_events("t1", "r1") + mw_events = [e for e in events if e["event_type"] == "middleware:title"] + assert len(mw_events) == 1 + assert mw_events[0]["category"] == "middleware" + assert mw_events[0]["content"]["name"] == "TitleMiddleware" + assert mw_events[0]["content"]["hook"] == "after_model" + assert mw_events[0]["content"]["action"] == "generate_title" + assert mw_events[0]["content"]["changes"]["title"] == "Test Title" + + @pytest.mark.anyio + async def test_middleware_tag_variants(self, journal_setup): + """Different middleware tags produce distinct event_types.""" + j, store = journal_setup + j.record_middleware("title", name="TitleMiddleware", hook="after_model", action="generate_title", changes={}) + j.record_middleware("guardrail", name="GuardrailMiddleware", hook="before_tool", action="deny", changes={}) + await j.flush() + events = await store.list_events("t1", "r1") + event_types = {e["event_type"] for e in events} + assert "middleware:title" in event_types + assert "middleware:guardrail" in event_types + + +class TestChatModelStartHumanMessage: + """Tests for on_chat_model_start extracting the first human message.""" + + @pytest.mark.anyio + async def test_extracts_first_human_message(self, journal_setup): + """on_chat_model_start captures the first HumanMessage from prompts.""" + from langchain_core.messages import AIMessage, HumanMessage + + j, store = journal_setup + messages_batch = [ + [HumanMessage(content="What is AI?"), AIMessage(content="Hi there")], + ] + j.on_chat_model_start({}, messages_batch, run_id=uuid4(), tags=["lead_agent"]) + await j.flush() + + assert j._first_human_msg == "What is AI?" 
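+        # For orientation, the behaviour this class pins down is roughly the
+        # following (a sketch inferred from these assertions, not the actual
+        # RunJournal implementation):
+        #
+        #     if self._first_human_msg is None:
+        #         for batch in messages:
+        #             for m in batch:
+        #                 if isinstance(m, HumanMessage) and m.name != "summary":
+        #                     self._first_human_msg = m.content
+        #                     # ...and emit a single llm.human.input event
+        #                     break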
+ events = await store.list_events("t1", "r1") + human_events = [e for e in events if e["event_type"] == "llm.human.input"] + assert len(human_events) == 1 + assert human_events[0]["content"]["content"] == "What is AI?" + + @pytest.mark.anyio + async def test_skips_summary_named_human_messages(self, journal_setup): + """HumanMessages with name='summary' are skipped.""" + from langchain_core.messages import HumanMessage + + j, store = journal_setup + messages_batch = [ + [HumanMessage(content="Summarized context", name="summary"), HumanMessage(content="Real question")], + ] + j.on_chat_model_start({}, messages_batch, run_id=uuid4(), tags=["lead_agent"]) + await j.flush() + + assert j._first_human_msg == "Real question" + + @pytest.mark.anyio + async def test_only_first_human_message_captured(self, journal_setup): + """Subsequent on_chat_model_start calls do not overwrite the first message.""" + from langchain_core.messages import HumanMessage + + j, store = journal_setup + j.on_chat_model_start({}, [[HumanMessage(content="First question")]], run_id=uuid4(), tags=["lead_agent"]) + j.on_chat_model_start({}, [[HumanMessage(content="Second question")]], run_id=uuid4(), tags=["lead_agent"]) + await j.flush() + + assert j._first_human_msg == "First question" + events = await store.list_events("t1", "r1") + human_events = [e for e in events if e["event_type"] == "llm.human.input"] + assert len(human_events) == 1 + + @pytest.mark.anyio + async def test_empty_messages_no_crash(self, journal_setup): + """on_chat_model_start with empty messages does not crash.""" + j, store = journal_setup + j.on_chat_model_start({}, [], run_id=uuid4(), tags=["lead_agent"]) + await j.flush() + assert j._first_human_msg is None diff --git a/backend/tests/test_run_manager.py b/backend/tests/test_run_manager.py index 2d6a0199c..58ecf1f26 100644 --- a/backend/tests/test_run_manager.py +++ b/backend/tests/test_run_manager.py @@ -75,27 +75,27 @@ async def test_cancel_not_inflight(manager: RunManager): @pytest.mark.anyio async def test_list_by_thread(manager: RunManager): - """Same thread should return multiple runs, newest first.""" + """Same thread should return multiple runs.""" r1 = await manager.create("thread-1") r2 = await manager.create("thread-1") await manager.create("thread-2") runs = await manager.list_by_thread("thread-1") assert len(runs) == 2 - assert runs[0].run_id == r2.run_id - assert runs[1].run_id == r1.run_id + assert runs[0].run_id == r1.run_id + assert runs[1].run_id == r2.run_id @pytest.mark.anyio async def test_list_by_thread_is_stable_when_timestamps_tie(manager: RunManager, monkeypatch: pytest.MonkeyPatch): - """Newest-first ordering should not depend on timestamp precision.""" + """Ordering should be stable (insertion order) even when timestamps tie.""" monkeypatch.setattr("deerflow.runtime.runs.manager._now_iso", lambda: "2026-01-01T00:00:00+00:00") r1 = await manager.create("thread-1") r2 = await manager.create("thread-1") runs = await manager.list_by_thread("thread-1") - assert [run.run_id for run in runs] == [r2.run_id, r1.run_id] + assert [run.run_id for run in runs] == [r1.run_id, r2.run_id] @pytest.mark.anyio diff --git a/backend/tests/test_run_repository.py b/backend/tests/test_run_repository.py new file mode 100644 index 000000000..34ab9b492 --- /dev/null +++ b/backend/tests/test_run_repository.py @@ -0,0 +1,196 @@ +"""Tests for RunRepository (SQLAlchemy-backed RunStore). + +Uses a temp SQLite DB to test ORM-backed CRUD operations. 
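+
+Each test follows the same shape, using the module helpers below:
+
+    repo = await _make_repo(tmp_path)   # init_engine() + RunRepository
+    await repo.put("r1", thread_id="t1", status="pending")
+    row = await repo.get("r1")
+    await _cleanup()                    # close_engine()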
+""" + +import pytest + +from deerflow.persistence.run import RunRepository + + +async def _make_repo(tmp_path): + from deerflow.persistence.engine import get_session_factory, init_engine + + url = f"sqlite+aiosqlite:///{tmp_path / 'test.db'}" + await init_engine("sqlite", url=url, sqlite_dir=str(tmp_path)) + return RunRepository(get_session_factory()) + + +async def _cleanup(): + from deerflow.persistence.engine import close_engine + + await close_engine() + + +class TestRunRepository: + @pytest.mark.anyio + async def test_put_and_get(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.put("r1", thread_id="t1", status="pending") + row = await repo.get("r1") + assert row is not None + assert row["run_id"] == "r1" + assert row["thread_id"] == "t1" + assert row["status"] == "pending" + await _cleanup() + + @pytest.mark.anyio + async def test_get_missing_returns_none(self, tmp_path): + repo = await _make_repo(tmp_path) + assert await repo.get("nope") is None + await _cleanup() + + @pytest.mark.anyio + async def test_update_status(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.put("r1", thread_id="t1") + await repo.update_status("r1", "running") + row = await repo.get("r1") + assert row["status"] == "running" + await _cleanup() + + @pytest.mark.anyio + async def test_update_status_with_error(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.put("r1", thread_id="t1") + await repo.update_status("r1", "error", error="boom") + row = await repo.get("r1") + assert row["status"] == "error" + assert row["error"] == "boom" + await _cleanup() + + @pytest.mark.anyio + async def test_list_by_thread(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.put("r1", thread_id="t1") + await repo.put("r2", thread_id="t1") + await repo.put("r3", thread_id="t2") + rows = await repo.list_by_thread("t1") + assert len(rows) == 2 + assert all(r["thread_id"] == "t1" for r in rows) + await _cleanup() + + @pytest.mark.anyio + async def test_list_by_thread_owner_filter(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.put("r1", thread_id="t1", user_id="alice") + await repo.put("r2", thread_id="t1", user_id="bob") + rows = await repo.list_by_thread("t1", user_id="alice") + assert len(rows) == 1 + assert rows[0]["user_id"] == "alice" + await _cleanup() + + @pytest.mark.anyio + async def test_delete(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.put("r1", thread_id="t1") + await repo.delete("r1") + assert await repo.get("r1") is None + await _cleanup() + + @pytest.mark.anyio + async def test_delete_nonexistent_is_noop(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.delete("nope") # should not raise + await _cleanup() + + @pytest.mark.anyio + async def test_list_pending(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.put("r1", thread_id="t1", status="pending") + await repo.put("r2", thread_id="t1", status="running") + await repo.put("r3", thread_id="t2", status="pending") + pending = await repo.list_pending() + assert len(pending) == 2 + assert all(r["status"] == "pending" for r in pending) + await _cleanup() + + @pytest.mark.anyio + async def test_update_run_completion(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.put("r1", thread_id="t1", status="running") + await repo.update_run_completion( + "r1", + status="success", + total_input_tokens=100, + total_output_tokens=50, + total_tokens=150, + llm_call_count=2, + lead_agent_tokens=120, + subagent_tokens=20, + 
middleware_tokens=10, + message_count=3, + last_ai_message="The answer is 42", + first_human_message="What is the meaning?", + ) + row = await repo.get("r1") + assert row["status"] == "success" + assert row["total_tokens"] == 150 + assert row["llm_call_count"] == 2 + assert row["lead_agent_tokens"] == 120 + assert row["message_count"] == 3 + assert row["last_ai_message"] == "The answer is 42" + assert row["first_human_message"] == "What is the meaning?" + await _cleanup() + + @pytest.mark.anyio + async def test_metadata_preserved(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.put("r1", thread_id="t1", metadata={"key": "value"}) + row = await repo.get("r1") + assert row["metadata"] == {"key": "value"} + await _cleanup() + + @pytest.mark.anyio + async def test_kwargs_with_non_serializable(self, tmp_path): + """kwargs containing non-JSON-serializable objects should be safely handled.""" + repo = await _make_repo(tmp_path) + + class Dummy: + pass + + await repo.put("r1", thread_id="t1", kwargs={"obj": Dummy()}) + row = await repo.get("r1") + assert "obj" in row["kwargs"] + await _cleanup() + + @pytest.mark.anyio + async def test_update_run_completion_preserves_existing_fields(self, tmp_path): + """update_run_completion does not overwrite thread_id or assistant_id.""" + repo = await _make_repo(tmp_path) + await repo.put("r1", thread_id="t1", assistant_id="agent1", status="running") + await repo.update_run_completion("r1", status="success", total_tokens=100) + row = await repo.get("r1") + assert row["thread_id"] == "t1" + assert row["assistant_id"] == "agent1" + assert row["total_tokens"] == 100 + await _cleanup() + + @pytest.mark.anyio + async def test_list_by_thread_ordered_desc(self, tmp_path): + """list_by_thread returns newest first.""" + repo = await _make_repo(tmp_path) + await repo.put("r1", thread_id="t1", created_at="2024-01-01T00:00:00+00:00") + await repo.put("r2", thread_id="t1", created_at="2024-01-02T00:00:00+00:00") + rows = await repo.list_by_thread("t1") + assert rows[0]["run_id"] == "r2" + assert rows[1]["run_id"] == "r1" + await _cleanup() + + @pytest.mark.anyio + async def test_list_by_thread_limit(self, tmp_path): + repo = await _make_repo(tmp_path) + for i in range(5): + await repo.put(f"r{i}", thread_id="t1") + rows = await repo.list_by_thread("t1", limit=2) + assert len(rows) == 2 + await _cleanup() + + @pytest.mark.anyio + async def test_owner_none_returns_all(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.put("r1", thread_id="t1", user_id="alice") + await repo.put("r2", thread_id="t1", user_id="bob") + rows = await repo.list_by_thread("t1", user_id=None) + assert len(rows) == 2 + await _cleanup() diff --git a/backend/tests/test_run_worker_rollback.py b/backend/tests/test_run_worker_rollback.py index 714ccdde1..b2b8da77f 100644 --- a/backend/tests/test_run_worker_rollback.py +++ b/backend/tests/test_run_worker_rollback.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock, call import pytest -from deerflow.runtime.runs.worker import _rollback_to_pre_run_checkpoint +from deerflow.runtime.runs.worker import _agent_factory_supports_app_config, _build_runtime_context, _rollback_to_pre_run_checkpoint class FakeCheckpointer: @@ -212,3 +212,57 @@ async def test_rollback_propagates_aput_writes_failure(): # aput succeeded, aput_writes was called but failed checkpointer.aput.assert_awaited_once() checkpointer.aput_writes.assert_awaited_once() + + +def test_agent_factory_supports_app_config_detects_supported_signature(): + def factory(*, 
config, app_config=None): + return (config, app_config) + + assert _agent_factory_supports_app_config(factory) is True + + +def test_build_runtime_context_defaults_to_thread_and_run_id(): + ctx = _build_runtime_context("thread-1", "run-1", None) + assert ctx == {"thread_id": "thread-1", "run_id": "run-1"} + + +def test_build_runtime_context_merges_caller_context(): + """Regression for issue #2677: keys from ``config['context']`` (e.g. ``agent_name``) + must be merged into the Runtime's context so that ``ToolRuntime.context`` — which + is what ``setup_agent`` reads — can see them.""" + caller_context = {"agent_name": "my-agent", "is_bootstrap": True, "model_name": "gpt-4"} + + ctx = _build_runtime_context("thread-1", "run-1", caller_context) + + assert ctx["thread_id"] == "thread-1" + assert ctx["run_id"] == "run-1" + assert ctx["agent_name"] == "my-agent" + assert ctx["is_bootstrap"] is True + assert ctx["model_name"] == "gpt-4" + + +def test_build_runtime_context_caller_cannot_override_thread_id_or_run_id(): + """A malicious or buggy caller must not be able to overwrite the worker-assigned + ``thread_id`` / ``run_id`` by stuffing them into ``config['context']``.""" + caller_context = {"thread_id": "spoofed", "run_id": "spoofed", "agent_name": "ok"} + + ctx = _build_runtime_context("real-thread", "real-run", caller_context) + + assert ctx["thread_id"] == "real-thread" + assert ctx["run_id"] == "real-run" + assert ctx["agent_name"] == "ok" + + +def test_build_runtime_context_ignores_non_dict_caller_context(): + ctx = _build_runtime_context("thread-1", "run-1", "not-a-dict") + assert ctx == {"thread_id": "thread-1", "run_id": "run-1"} + + +def test_agent_factory_supports_app_config_returns_false_when_signature_lookup_fails(monkeypatch): + class BrokenCallable: + def __call__(self, **kwargs): + return kwargs + + monkeypatch.setattr("deerflow.runtime.runs.worker.inspect.signature", lambda _obj: (_ for _ in ()).throw(ValueError("boom"))) + + assert _agent_factory_supports_app_config(BrokenCallable()) is False diff --git a/backend/tests/test_runs_api_endpoints.py b/backend/tests/test_runs_api_endpoints.py new file mode 100644 index 000000000..1826e4d8e --- /dev/null +++ b/backend/tests/test_runs_api_endpoints.py @@ -0,0 +1,245 @@ +"""Tests for GET /api/runs/{run_id}/messages and GET /api/runs/{run_id}/feedback endpoints.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, MagicMock + +from _router_auth_helpers import make_authed_test_app +from fastapi.testclient import TestClient + +from app.gateway.routers import runs + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _make_app(run_store=None, event_store=None, feedback_repo=None): + """Build a test FastAPI app with stub auth and mocked state.""" + app = make_authed_test_app() + app.include_router(runs.router) + + if run_store is not None: + app.state.run_store = run_store + if event_store is not None: + app.state.run_event_store = event_store + if feedback_repo is not None: + app.state.feedback_repo = feedback_repo + + return app + + +def _make_run_store(run_record: dict | None): + """Return an AsyncMock run store whose get() returns run_record.""" + store = MagicMock() + store.get = AsyncMock(return_value=run_record) + return store + + +def _make_event_store(rows: list[dict]): + """Return an AsyncMock event store whose list_messages_by_run() returns rows.""" + store = MagicMock() + 
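+    # The mocks built here feed endpoint tests that encode a cursor-pagination
+    # contract: the router asks the store for limit + 1 rows, trims the result
+    # to limit, and derives has_more from the extra row. A sketch of that
+    # handler logic, inferred from the tests below (not the actual router code):
+    #
+    #     rows = await event_store.list_messages_by_run(
+    #         thread_id, run_id, limit=limit + 1,
+    #         before_seq=before_seq, after_seq=after_seq,
+    #     )
+    #     return {"data": rows[:limit], "has_more": len(rows) > limit}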
store.list_messages_by_run = AsyncMock(return_value=rows) + return store + + +def _make_message(seq: int) -> dict: + return {"seq": seq, "event_type": "on_chat_model_stream", "category": "message", "content": f"msg-{seq}"} + + +# --------------------------------------------------------------------------- +# Tests +# --------------------------------------------------------------------------- + + +def test_run_messages_returns_envelope(): + """GET /api/runs/{run_id}/messages returns {data: [...], has_more: bool}.""" + rows = [_make_message(i) for i in range(1, 4)] + run_record = {"run_id": "run-1", "thread_id": "thread-1"} + app = _make_app( + run_store=_make_run_store(run_record), + event_store=_make_event_store(rows), + ) + with TestClient(app) as client: + response = client.get("/api/runs/run-1/messages") + assert response.status_code == 200 + body = response.json() + assert "data" in body + assert "has_more" in body + assert body["has_more"] is False + assert len(body["data"]) == 3 + + +def test_run_messages_404_when_run_not_found(): + """Returns 404 when the run store returns None.""" + app = _make_app( + run_store=_make_run_store(None), + event_store=_make_event_store([]), + ) + with TestClient(app) as client: + response = client.get("/api/runs/missing-run/messages") + assert response.status_code == 404 + assert "missing-run" in response.json()["detail"] + + +def test_run_messages_has_more_true_when_extra_row_returned(): + """has_more=True when event store returns limit+1 rows.""" + # Default limit is 50; provide 51 rows + rows = [_make_message(i) for i in range(1, 52)] # 51 rows + run_record = {"run_id": "run-2", "thread_id": "thread-2"} + app = _make_app( + run_store=_make_run_store(run_record), + event_store=_make_event_store(rows), + ) + with TestClient(app) as client: + response = client.get("/api/runs/run-2/messages") + assert response.status_code == 200 + body = response.json() + assert body["has_more"] is True + assert len(body["data"]) == 50 # trimmed to limit + + +def test_run_messages_passes_after_seq_to_event_store(): + """after_seq query param is forwarded to event_store.list_messages_by_run.""" + rows = [_make_message(10)] + run_record = {"run_id": "run-3", "thread_id": "thread-3"} + event_store = _make_event_store(rows) + app = _make_app( + run_store=_make_run_store(run_record), + event_store=event_store, + ) + with TestClient(app) as client: + response = client.get("/api/runs/run-3/messages?after_seq=5") + assert response.status_code == 200 + event_store.list_messages_by_run.assert_awaited_once_with( + "thread-3", + "run-3", + limit=51, # default limit(50) + 1 + before_seq=None, + after_seq=5, + ) + + +def test_run_messages_respects_custom_limit(): + """Custom limit is respected and capped at 200.""" + rows = [_make_message(i) for i in range(1, 6)] + run_record = {"run_id": "run-4", "thread_id": "thread-4"} + event_store = _make_event_store(rows) + app = _make_app( + run_store=_make_run_store(run_record), + event_store=event_store, + ) + with TestClient(app) as client: + response = client.get("/api/runs/run-4/messages?limit=10") + assert response.status_code == 200 + event_store.list_messages_by_run.assert_awaited_once_with( + "thread-4", + "run-4", + limit=11, # 10 + 1 + before_seq=None, + after_seq=None, + ) + + +def test_run_messages_passes_before_seq_to_event_store(): + """before_seq query param is forwarded to event_store.list_messages_by_run.""" + rows = [_make_message(3)] + run_record = {"run_id": "run-5", "thread_id": "thread-5"} + event_store = 
_make_event_store(rows) + app = _make_app( + run_store=_make_run_store(run_record), + event_store=event_store, + ) + with TestClient(app) as client: + response = client.get("/api/runs/run-5/messages?before_seq=10") + assert response.status_code == 200 + event_store.list_messages_by_run.assert_awaited_once_with( + "thread-5", + "run-5", + limit=51, + before_seq=10, + after_seq=None, + ) + + +def test_run_messages_empty_data(): + """Returns empty data list when no messages exist.""" + run_record = {"run_id": "run-6", "thread_id": "thread-6"} + app = _make_app( + run_store=_make_run_store(run_record), + event_store=_make_event_store([]), + ) + with TestClient(app) as client: + response = client.get("/api/runs/run-6/messages") + assert response.status_code == 200 + body = response.json() + assert body["data"] == [] + assert body["has_more"] is False + + +def _make_feedback_repo(rows: list[dict]): + """Return an AsyncMock feedback repo whose list_by_run() returns rows.""" + repo = MagicMock() + repo.list_by_run = AsyncMock(return_value=rows) + return repo + + +def _make_feedback(run_id: str, idx: int) -> dict: + return {"id": f"fb-{idx}", "run_id": run_id, "thread_id": "thread-x", "value": "up"} + + +# --------------------------------------------------------------------------- +# TestRunFeedback +# --------------------------------------------------------------------------- + + +class TestRunFeedback: + def test_returns_list_of_feedback_dicts(self): + """GET /api/runs/{run_id}/feedback returns a list of feedback dicts.""" + run_record = {"run_id": "run-fb-1", "thread_id": "thread-fb-1"} + rows = [_make_feedback("run-fb-1", i) for i in range(3)] + app = _make_app( + run_store=_make_run_store(run_record), + feedback_repo=_make_feedback_repo(rows), + ) + with TestClient(app) as client: + response = client.get("/api/runs/run-fb-1/feedback") + assert response.status_code == 200 + body = response.json() + assert isinstance(body, list) + assert len(body) == 3 + + def test_404_when_run_not_found(self): + """Returns 404 when run store returns None.""" + app = _make_app( + run_store=_make_run_store(None), + feedback_repo=_make_feedback_repo([]), + ) + with TestClient(app) as client: + response = client.get("/api/runs/missing-run/feedback") + assert response.status_code == 404 + assert "missing-run" in response.json()["detail"] + + def test_empty_list_when_no_feedback(self): + """Returns empty list when no feedback exists for the run.""" + run_record = {"run_id": "run-fb-2", "thread_id": "thread-fb-2"} + app = _make_app( + run_store=_make_run_store(run_record), + feedback_repo=_make_feedback_repo([]), + ) + with TestClient(app) as client: + response = client.get("/api/runs/run-fb-2/feedback") + assert response.status_code == 200 + assert response.json() == [] + + def test_503_when_feedback_repo_not_configured(self): + """Returns 503 when feedback_repo is None (no DB configured).""" + run_record = {"run_id": "run-fb-3", "thread_id": "thread-fb-3"} + app = _make_app( + run_store=_make_run_store(run_record), + ) + # Explicitly set feedback_repo to None to simulate missing DB + app.state.feedback_repo = None + with TestClient(app) as client: + response = client.get("/api/runs/run-fb-3/feedback") + assert response.status_code == 503 diff --git a/backend/tests/test_sandbox_tools_security.py b/backend/tests/test_sandbox_tools_security.py index 8c67cd50a..57466a0fe 100644 --- a/backend/tests/test_sandbox_tools_security.py +++ b/backend/tests/test_sandbox_tools_security.py @@ -346,6 +346,104 @@ def 
test_validate_local_bash_command_paths_blocks_traversal_in_skills() -> None: ) +@pytest.mark.parametrize( + "command", + [ + "cat ../uploads/secret.txt", + "cat subdir/../../secret.txt", + "python script.py --input=../secret.txt", + "echo ok > ../outputs/result.txt", + ], +) +def test_validate_local_bash_command_paths_blocks_relative_dotdot_segments(command: str) -> None: + with pytest.raises(PermissionError, match="path traversal"): + validate_local_bash_command_paths(command, _THREAD_DATA) + + +def test_validate_local_bash_command_paths_blocks_cd_root_escape() -> None: + with pytest.raises(PermissionError, match="Unsafe working directory"): + validate_local_bash_command_paths("cd / && cat etc/passwd", _THREAD_DATA) + + +def test_validate_local_bash_command_paths_blocks_cd_parent_escape() -> None: + with pytest.raises(PermissionError, match="path traversal"): + validate_local_bash_command_paths("cd .. && cat etc/passwd", _THREAD_DATA) + + +def test_validate_local_bash_command_paths_blocks_cd_env_var_escape() -> None: + with pytest.raises(PermissionError, match="Unsafe working directory"): + validate_local_bash_command_paths("cd $HOME && cat .ssh/id_rsa", _THREAD_DATA) + + +def test_validate_local_bash_command_paths_blocks_multiline_cd_escape() -> None: + with pytest.raises(PermissionError, match="Unsafe working directory"): + validate_local_bash_command_paths("echo ok\ncd $HOME && cat .ssh/id_rsa", _THREAD_DATA) + + +@pytest.mark.parametrize( + "command", + [ + "command cd / && cat etc/passwd", + "builtin cd $HOME && cat .ssh/id_rsa", + "if cd $HOME; then cat .ssh/id_rsa; fi", + "{ cd /; cat etc/passwd; }", + 'echo "$(cd $HOME && cat .ssh/id_rsa)"', + ], +) +def test_validate_local_bash_command_paths_blocks_complex_cd_escapes(command: str) -> None: + with pytest.raises(PermissionError, match="Unsafe working directory"): + validate_local_bash_command_paths(command, _THREAD_DATA) + + +@pytest.mark.parametrize( + "command", + [ + "ls /", + "ln -s / root && cat root/etc/passwd", + "command ls /", + ], +) +def test_validate_local_bash_command_paths_blocks_bare_root_path(command: str) -> None: + with pytest.raises(PermissionError, match="Unsafe absolute paths"): + validate_local_bash_command_paths(command, _THREAD_DATA) + + +@pytest.mark.parametrize( + "command", + [ + "echo cd /", + "printf '%s\\n' pushd /", + ], +) +def test_validate_local_bash_command_paths_allows_cd_words_as_arguments(command: str) -> None: + validate_local_bash_command_paths(command, _THREAD_DATA) + + +def test_validate_local_bash_command_paths_allows_workspace_relative_paths() -> None: + validate_local_bash_command_paths( + "mkdir -p reports && python script.py data/input.csv > reports/out.txt", + _THREAD_DATA, + ) + + +def test_validate_local_bash_command_paths_allows_cd_virtual_workspace_with_relative_paths() -> None: + validate_local_bash_command_paths( + "cd /mnt/user-data/workspace && cat data/input.csv > reports/out.txt", + _THREAD_DATA, + ) + + +def test_validate_local_bash_command_paths_allows_http_url_dotdot_segments() -> None: + validate_local_bash_command_paths( + "curl https://example.com/packages/../archive.tar.gz -o /mnt/user-data/workspace/archive.tar.gz", + _THREAD_DATA, + ) + validate_local_bash_command_paths( + "curl http://example.com/packages/../archive.tar.gz -o /mnt/user-data/workspace/archive.tar.gz", + _THREAD_DATA, + ) + + def test_bash_tool_rejects_host_bash_when_local_sandbox_default(monkeypatch) -> None: runtime = SimpleNamespace( state={"sandbox": {"sandbox_id": "local"}, "thread_data": 
_THREAD_DATA.copy()}, @@ -367,6 +465,28 @@ def test_bash_tool_rejects_host_bash_when_local_sandbox_default(monkeypatch) -> assert "Host bash execution is disabled" in result +def test_bash_tool_blocks_relative_traversal_before_host_execution(monkeypatch) -> None: + runtime = SimpleNamespace( + state={"sandbox": {"sandbox_id": "local"}, "thread_data": _THREAD_DATA.copy()}, + context={"thread_id": "thread-1"}, + ) + + monkeypatch.setattr( + "deerflow.sandbox.tools.ensure_sandbox_initialized", + lambda runtime: SimpleNamespace(execute_command=lambda command: pytest.fail("unsafe command should not execute")), + ) + monkeypatch.setattr("deerflow.sandbox.tools.ensure_thread_directories_exist", lambda runtime: None) + monkeypatch.setattr("deerflow.sandbox.tools.is_host_bash_allowed", lambda: True) + + result = bash_tool.func( + runtime=runtime, + description="run command", + command="cat ../uploads/secret.txt", + ) + + assert "path traversal" in result + + # ---------- Skills path tests ---------- diff --git a/backend/tests/test_security_scanner.py b/backend/tests/test_security_scanner.py index 4dcaa691c..088cb2c11 100644 --- a/backend/tests/test_security_scanner.py +++ b/backend/tests/test_security_scanner.py @@ -5,6 +5,27 @@ import pytest from deerflow.skills.security_scanner import scan_skill_content +@pytest.mark.anyio +async def test_scan_skill_content_passes_run_name_to_model(monkeypatch): + config = SimpleNamespace(skill_evolution=SimpleNamespace(moderation_model_name=None)) + fake_response = SimpleNamespace(content='{"decision":"allow","reason":"ok"}') + + class FakeModel: + async def ainvoke(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + return fake_response + + model = FakeModel() + monkeypatch.setattr("deerflow.skills.security_scanner.get_app_config", lambda: config) + monkeypatch.setattr("deerflow.skills.security_scanner.create_chat_model", lambda **kwargs: model) + + result = await scan_skill_content("---\nname: demo-skill\ndescription: demo\n---\n", executable=False) + + assert result.decision == "allow" + assert model.kwargs["config"] == {"run_name": "security_agent"} + + @pytest.mark.anyio async def test_scan_skill_content_blocks_when_model_unavailable(monkeypatch): config = SimpleNamespace(skill_evolution=SimpleNamespace(moderation_model_name=None)) diff --git a/backend/tests/test_skill_manage_tool.py b/backend/tests/test_skill_manage_tool.py index 1b16fb48f..3933cb208 100644 --- a/backend/tests/test_skill_manage_tool.py +++ b/backend/tests/test_skill_manage_tool.py @@ -20,11 +20,10 @@ async def _async_result(decision: str, reason: str): def test_skill_manage_create_and_patch(monkeypatch, tmp_path): skills_root = tmp_path / "skills" config = SimpleNamespace( - skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills"), + skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills", use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"), skill_evolution=SimpleNamespace(enabled=True, moderation_model_name=None), ) monkeypatch.setattr("deerflow.config.get_app_config", lambda: config) - monkeypatch.setattr("deerflow.skills.manager.get_app_config", lambda: config) monkeypatch.setattr("deerflow.skills.security_scanner.get_app_config", lambda: config) refresh_calls = [] @@ -64,11 +63,10 @@ def test_skill_manage_create_and_patch(monkeypatch, tmp_path): def test_skill_manage_patch_replaces_single_occurrence_by_default(monkeypatch, tmp_path): skills_root = tmp_path / "skills" config = 
SimpleNamespace(
-        skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills"),
+        skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills", use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"),
         skill_evolution=SimpleNamespace(enabled=True, moderation_model_name=None),
     )
     monkeypatch.setattr("deerflow.config.get_app_config", lambda: config)
-    monkeypatch.setattr("deerflow.skills.manager.get_app_config", lambda: config)
     monkeypatch.setattr("deerflow.skills.security_scanner.get_app_config", lambda: config)
 
     async def _refresh():
@@ -104,11 +102,10 @@ def test_skill_manage_rejects_public_skill_patch(monkeypatch, tmp_path):
     public_dir.mkdir(parents=True, exist_ok=True)
     (public_dir / "SKILL.md").write_text(_skill_content("deep-research"), encoding="utf-8")
     config = SimpleNamespace(
-        skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills"),
+        skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills", use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"),
         skill_evolution=SimpleNamespace(enabled=True, moderation_model_name=None),
     )
     monkeypatch.setattr("deerflow.config.get_app_config", lambda: config)
-    monkeypatch.setattr("deerflow.skills.manager.get_app_config", lambda: config)
 
     runtime = SimpleNamespace(context={}, config={"configurable": {}})
@@ -128,11 +125,10 @@ def test_skill_manage_sync_wrapper_supported(monkeypatch, tmp_path):
     skills_root = tmp_path / "skills"
     config = SimpleNamespace(
-        skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills"),
+        skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills", use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"),
         skill_evolution=SimpleNamespace(enabled=True, moderation_model_name=None),
     )
     monkeypatch.setattr("deerflow.config.get_app_config", lambda: config)
-    monkeypatch.setattr("deerflow.skills.manager.get_app_config", lambda: config)
     refresh_calls = []
 
     async def _refresh():
@@ -156,11 +152,10 @@ def test_skill_manage_rejects_support_path_traversal(monkeypatch, tmp_path):
     skills_root = tmp_path / "skills"
     config = SimpleNamespace(
-        skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills"),
+        skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills", use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"),
         skill_evolution=SimpleNamespace(enabled=True, moderation_model_name=None),
     )
     monkeypatch.setattr("deerflow.config.get_app_config", lambda: config)
-    monkeypatch.setattr("deerflow.skills.manager.get_app_config", lambda: config)
     monkeypatch.setattr("deerflow.skills.security_scanner.get_app_config", lambda: config)
 
     async def _refresh():
diff --git a/backend/tests/test_skills_bundled.py b/backend/tests/test_skills_bundled.py
new file mode 100644
index 000000000..0e99997a2
--- /dev/null
+++ b/backend/tests/test_skills_bundled.py
@@ -0,0 +1,31 @@
+"""Validate every bundled SKILL.md under skills/public/.
+
+Catches regressions like #2443 — a SKILL.md whose YAML front-matter fails to
+parse (e.g. an unquoted description containing a colon, which YAML tries to
+read as a nested mapping). Each bundled skill is checked individually so the
+failure message identifies the exact file.
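+
+For example, front-matter of this shape (an illustrative case, not taken from
+a real bundled skill) fails to parse because of the second colon:
+
+    ---
+    name: demo-skill
+    description: Research helper: plans and runs deep research
+    ---
+
+Quoting the value (description: "Research helper: plans and runs deep
+research") turns it back into a plain string scalar.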
+""" + +from pathlib import Path + +import pytest + +from deerflow.skills.validation import _validate_skill_frontmatter + +SKILLS_PUBLIC_DIR = Path(__file__).resolve().parents[2] / "skills" / "public" +BUNDLED_SKILL_DIRS = sorted(p.parent for p in SKILLS_PUBLIC_DIR.rglob("SKILL.md")) + + +@pytest.mark.parametrize( + "skill_dir", + BUNDLED_SKILL_DIRS, + ids=lambda p: str(p.relative_to(SKILLS_PUBLIC_DIR)), +) +def test_bundled_skill_frontmatter_is_valid(skill_dir: Path) -> None: + valid, msg, name = _validate_skill_frontmatter(skill_dir) + assert valid, f"{skill_dir.relative_to(SKILLS_PUBLIC_DIR)}: {msg}" + assert name, f"{skill_dir.relative_to(SKILLS_PUBLIC_DIR)}: no name extracted" + + +def test_skills_public_dir_has_skills() -> None: + assert BUNDLED_SKILL_DIRS, f"no SKILL.md found under {SKILLS_PUBLIC_DIR}" diff --git a/backend/tests/test_skills_custom_router.py b/backend/tests/test_skills_custom_router.py index e78eb54d7..ed93e5510 100644 --- a/backend/tests/test_skills_custom_router.py +++ b/backend/tests/test_skills_custom_router.py @@ -1,5 +1,6 @@ import errno import json +import zipfile from pathlib import Path from types import SimpleNamespace @@ -7,7 +8,7 @@ from fastapi import FastAPI from fastapi.testclient import TestClient from app.gateway.routers import skills as skills_router -from deerflow.skills.manager import get_skill_history_file +from deerflow.skills.storage import get_or_new_skill_storage from deerflow.skills.types import Skill @@ -35,17 +36,118 @@ def _make_skill(name: str, *, enabled: bool) -> Skill: ) +def _make_test_app(config) -> FastAPI: + app = FastAPI() + app.state.config = config + app.include_router(skills_router.router) + return app + + +def _make_skill_archive(tmp_path: Path, name: str, content: str | None = None) -> Path: + archive = tmp_path / f"{name}.skill" + skill_content = content or _skill_content(name) + with zipfile.ZipFile(archive, "w") as zf: + zf.writestr(f"{name}/SKILL.md", skill_content) + return archive + + +def test_install_skill_archive_runs_security_scan(monkeypatch, tmp_path): + skills_root = tmp_path / "skills" + (skills_root / "custom").mkdir(parents=True) + archive = _make_skill_archive(tmp_path, "archive-skill") + scan_calls = [] + refresh_calls = [] + + async def _scan(content, *, executable, location, app_config=None): + from deerflow.skills.security_scanner import ScanResult + + scan_calls.append({"content": content, "executable": executable, "location": location}) + return ScanResult(decision="allow", reason="ok") + + async def _refresh(): + refresh_calls.append("refresh") + + from types import SimpleNamespace + + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + + storage = LocalSkillStorage(host_path=str(skills_root)) + config = SimpleNamespace( + skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills", use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"), + skill_evolution=SimpleNamespace(enabled=True, moderation_model_name=None), + ) + monkeypatch.setattr(skills_router, "resolve_thread_virtual_path", lambda thread_id, path: archive) + monkeypatch.setattr(skills_router, "get_or_new_skill_storage", lambda **kw: storage) + monkeypatch.setattr("deerflow.skills.installer.scan_skill_content", _scan) + monkeypatch.setattr(skills_router, "refresh_skills_system_prompt_cache_async", _refresh) + + app = _make_test_app(config) + + with TestClient(app) as client: + response = client.post("/api/skills/install", json={"thread_id": "thread-1", "path": 
"mnt/user-data/outputs/archive-skill.skill"}) + + assert response.status_code == 200 + assert response.json()["skill_name"] == "archive-skill" + assert (skills_root / "custom" / "archive-skill" / "SKILL.md").exists() + assert scan_calls == [ + { + "content": _skill_content("archive-skill"), + "executable": False, + "location": "archive-skill/SKILL.md", + } + ] + assert refresh_calls == ["refresh"] + + +def test_install_skill_archive_security_scan_block_returns_400(monkeypatch, tmp_path): + skills_root = tmp_path / "skills" + (skills_root / "custom").mkdir(parents=True) + archive = _make_skill_archive(tmp_path, "blocked-skill") + refresh_calls = [] + + async def _scan(*args, **kwargs): + from deerflow.skills.security_scanner import ScanResult + + return ScanResult(decision="block", reason="prompt injection") + + async def _refresh(): + refresh_calls.append("refresh") + + from types import SimpleNamespace + + from deerflow.skills.storage.local_skill_storage import LocalSkillStorage + + storage = LocalSkillStorage(host_path=str(skills_root)) + config = SimpleNamespace( + skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills", use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"), + skill_evolution=SimpleNamespace(enabled=True, moderation_model_name=None), + ) + monkeypatch.setattr(skills_router, "resolve_thread_virtual_path", lambda thread_id, path: archive) + monkeypatch.setattr(skills_router, "get_or_new_skill_storage", lambda **kw: storage) + monkeypatch.setattr("deerflow.skills.installer.scan_skill_content", _scan) + monkeypatch.setattr(skills_router, "refresh_skills_system_prompt_cache_async", _refresh) + + app = _make_test_app(config) + + with TestClient(app) as client: + response = client.post("/api/skills/install", json={"thread_id": "thread-1", "path": "mnt/user-data/outputs/blocked-skill.skill"}) + + assert response.status_code == 400 + assert "Security scan blocked skill 'blocked-skill': prompt injection" in response.json()["detail"] + assert not (skills_root / "custom" / "blocked-skill").exists() + assert refresh_calls == [] + + def test_custom_skills_router_lifecycle(monkeypatch, tmp_path): skills_root = tmp_path / "skills" custom_dir = skills_root / "custom" / "demo-skill" custom_dir.mkdir(parents=True, exist_ok=True) (custom_dir / "SKILL.md").write_text(_skill_content("demo-skill"), encoding="utf-8") config = SimpleNamespace( - skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills"), + skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills", use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"), skill_evolution=SimpleNamespace(enabled=True, moderation_model_name=None), ) monkeypatch.setattr("deerflow.config.get_app_config", lambda: config) - monkeypatch.setattr("deerflow.skills.manager.get_app_config", lambda: config) monkeypatch.setattr("app.gateway.routers.skills.scan_skill_content", lambda *args, **kwargs: _async_scan("allow", "ok")) refresh_calls = [] @@ -54,8 +156,7 @@ def test_custom_skills_router_lifecycle(monkeypatch, tmp_path): monkeypatch.setattr("app.gateway.routers.skills.refresh_skills_system_prompt_cache_async", _refresh) - app = FastAPI() - app.include_router(skills_router.router) + app = _make_test_app(config) with TestClient(app) as client: response = client.get("/api/skills/custom") @@ -91,12 +192,13 @@ def test_custom_skill_rollback_blocked_by_scanner(monkeypatch, tmp_path): edited_content = _skill_content("demo-skill", "Edited 
skill") (custom_dir / "SKILL.md").write_text(edited_content, encoding="utf-8") config = SimpleNamespace( - skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills"), + skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills", use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"), skill_evolution=SimpleNamespace(enabled=True, moderation_model_name=None), ) monkeypatch.setattr("deerflow.config.get_app_config", lambda: config) - monkeypatch.setattr("deerflow.skills.manager.get_app_config", lambda: config) - get_skill_history_file("demo-skill").write_text( + history_file = get_or_new_skill_storage(app_config=config).get_skill_history_file("demo-skill") + history_file.parent.mkdir(parents=True, exist_ok=True) + history_file.write_text( '{"action":"human_edit","prev_content":' + json.dumps(original_content) + ',"new_content":' + json.dumps(edited_content) + "}\n", encoding="utf-8", ) @@ -113,8 +215,7 @@ def test_custom_skill_rollback_blocked_by_scanner(monkeypatch, tmp_path): monkeypatch.setattr("app.gateway.routers.skills.scan_skill_content", _scan) - app = FastAPI() - app.include_router(skills_router.router) + app = _make_test_app(config) with TestClient(app) as client: rollback_response = client.post("/api/skills/custom/demo-skill/rollback", json={"history_index": -1}) @@ -133,11 +234,10 @@ def test_custom_skill_delete_preserves_history_and_allows_restore(monkeypatch, t original_content = _skill_content("demo-skill") (custom_dir / "SKILL.md").write_text(original_content, encoding="utf-8") config = SimpleNamespace( - skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills"), + skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills", use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"), skill_evolution=SimpleNamespace(enabled=True, moderation_model_name=None), ) monkeypatch.setattr("deerflow.config.get_app_config", lambda: config) - monkeypatch.setattr("deerflow.skills.manager.get_app_config", lambda: config) monkeypatch.setattr("app.gateway.routers.skills.scan_skill_content", lambda *args, **kwargs: _async_scan("allow", "ok")) refresh_calls = [] @@ -146,8 +246,7 @@ def test_custom_skill_delete_preserves_history_and_allows_restore(monkeypatch, t monkeypatch.setattr("app.gateway.routers.skills.refresh_skills_system_prompt_cache_async", _refresh) - app = FastAPI() - app.include_router(skills_router.router) + app = _make_test_app(config) with TestClient(app) as client: delete_response = client.delete("/api/skills/custom/demo-skill") @@ -171,11 +270,10 @@ def test_custom_skill_delete_continues_when_history_write_is_readonly(monkeypatc custom_dir.mkdir(parents=True, exist_ok=True) (custom_dir / "SKILL.md").write_text(_skill_content("demo-skill"), encoding="utf-8") config = SimpleNamespace( - skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills"), + skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills", use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"), skill_evolution=SimpleNamespace(enabled=True, moderation_model_name=None), ) monkeypatch.setattr("deerflow.config.get_app_config", lambda: config) - monkeypatch.setattr("deerflow.skills.manager.get_app_config", lambda: config) refresh_calls = [] async def _refresh(): @@ -184,11 +282,10 @@ def test_custom_skill_delete_continues_when_history_write_is_readonly(monkeypatc def _readonly_history(*args, **kwargs): 
        raise OSError(errno.EROFS, "Read-only file system", str(skills_root / "custom" / ".history"))
 
-    monkeypatch.setattr("app.gateway.routers.skills.append_history", _readonly_history)
+    monkeypatch.setattr("deerflow.skills.storage.local_skill_storage.LocalSkillStorage.append_history", _readonly_history)
     monkeypatch.setattr("app.gateway.routers.skills.refresh_skills_system_prompt_cache_async", _refresh)
 
-    app = FastAPI()
-    app.include_router(skills_router.router)
+    app = _make_test_app(config)
 
     with TestClient(app) as client:
         delete_response = client.delete("/api/skills/custom/demo-skill")
@@ -205,11 +302,10 @@ def test_custom_skill_delete_fails_when_skill_dir_removal_fails(monkeypatch, tmp
     custom_dir.mkdir(parents=True, exist_ok=True)
     (custom_dir / "SKILL.md").write_text(_skill_content("demo-skill"), encoding="utf-8")
     config = SimpleNamespace(
-        skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills"),
+        skills=SimpleNamespace(get_skills_path=lambda: skills_root, container_path="/mnt/skills", use="deerflow.skills.storage.local_skill_storage:LocalSkillStorage"),
         skill_evolution=SimpleNamespace(enabled=True, moderation_model_name=None),
    )
     monkeypatch.setattr("deerflow.config.get_app_config", lambda: config)
-    monkeypatch.setattr("deerflow.skills.manager.get_app_config", lambda: config)
     refresh_calls = []
 
     async def _refresh():
@@ -218,11 +314,10 @@
     def _fail_rmtree(*args, **kwargs):
         raise PermissionError(errno.EACCES, "Permission denied", str(custom_dir))
 
-    monkeypatch.setattr("app.gateway.routers.skills.shutil.rmtree", _fail_rmtree)
+    monkeypatch.setattr("deerflow.skills.storage.local_skill_storage.shutil.rmtree", _fail_rmtree)
     monkeypatch.setattr("app.gateway.routers.skills.refresh_skills_system_prompt_cache_async", _refresh)
 
-    app = FastAPI()
-    app.include_router(skills_router.router)
+    app = _make_test_app(config)
 
     with TestClient(app) as client:
         delete_response = client.delete("/api/skills/custom/demo-skill")
@@ -248,14 +343,14 @@ def test_update_skill_refreshes_prompt_cache_before_return(monkeypatch, tmp_path
         refresh_calls.append("refresh")
         enabled_state["value"] = False
 
-    monkeypatch.setattr("app.gateway.routers.skills.load_skills", _load_skills)
+    mock_storage = SimpleNamespace(load_skills=_load_skills)
+    monkeypatch.setattr("app.gateway.routers.skills.get_or_new_skill_storage", lambda **kwargs: mock_storage)
     monkeypatch.setattr("app.gateway.routers.skills.get_extensions_config", lambda: SimpleNamespace(mcp_servers={}, skills={}))
     monkeypatch.setattr("app.gateway.routers.skills.reload_extensions_config", lambda: None)
     monkeypatch.setattr(skills_router.ExtensionsConfig, "resolve_config_path", staticmethod(lambda: config_path))
     monkeypatch.setattr("app.gateway.routers.skills.refresh_skills_system_prompt_cache_async", _refresh)
 
-    app = FastAPI()
-    app.include_router(skills_router.router)
+    app = _make_test_app(SimpleNamespace())
 
     with TestClient(app) as client:
         response = client.put("/api/skills/demo-skill", json={"enabled": False})
diff --git a/backend/tests/test_skills_installer.py b/backend/tests/test_skills_installer.py
index c5da4b070..101f1b2a8 100644
--- a/backend/tests/test_skills_installer.py
+++ b/backend/tests/test_skills_installer.py
@@ -1,5 +1,6 @@
 """Tests for deerflow.skills.installer — shared skill installation logic."""
 
+import shutil
 import stat
 import zipfile
 from pathlib import Path
@@ -7,13 +8,15 @@ from pathlib import Path
 
 import pytest
 
 from deerflow.skills.installer import (
-    install_skill_from_archive,
+    SkillSecurityScanError,
     is_symlink_member,
     is_unsafe_zip_member,
     resolve_skill_dir_from_archive,
     safe_extract_skill_archive,
     should_ignore_archive_entry,
 )
+from deerflow.skills.security_scanner import ScanResult
+from deerflow.skills.storage import get_or_new_skill_storage
 
 # ---------------------------------------------------------------------------
 # is_unsafe_zip_member
 # ---------------------------------------------------------------------------
@@ -169,6 +172,13 @@ class TestSafeExtract:
 
 
 class TestInstallSkillFromArchive:
+    @pytest.fixture(autouse=True)
+    def _allow_security_scan(self, monkeypatch):
+        async def _scan(*args, **kwargs):
+            return ScanResult(decision="allow", reason="ok")
+
+        monkeypatch.setattr("deerflow.skills.installer.scan_skill_content", _scan)
+
     def _make_skill_zip(self, tmp_path: Path, skill_name: str = "test-skill") -> Path:
         """Create a valid .skill archive."""
         zip_path = tmp_path / f"{skill_name}.skill"
@@ -183,23 +193,195 @@ class TestInstallSkillFromArchive:
         zip_path = self._make_skill_zip(tmp_path)
         skills_root = tmp_path / "skills"
         skills_root.mkdir()
-        result = install_skill_from_archive(zip_path, skills_root=skills_root)
+        result = get_or_new_skill_storage(skills_path=skills_root).install_skill_from_archive(zip_path)
         assert result["success"] is True
         assert result["skill_name"] == "test-skill"
         assert (skills_root / "custom" / "test-skill" / "SKILL.md").exists()
 
+    def test_scans_skill_markdown_before_install(self, tmp_path, monkeypatch):
+        zip_path = self._make_skill_zip(tmp_path)
+        skills_root = tmp_path / "skills"
+        skills_root.mkdir()
+        calls = []
+
+        async def _scan(content, *, executable, location):
+            calls.append({"content": content, "executable": executable, "location": location})
+            return ScanResult(decision="allow", reason="ok")
+
+        monkeypatch.setattr("deerflow.skills.installer.scan_skill_content", _scan)
+
+        get_or_new_skill_storage(skills_path=skills_root).install_skill_from_archive(zip_path)
+
+        assert calls == [
+            {
+                "content": "---\nname: test-skill\ndescription: A test skill\n---\n\n# test-skill\n",
+                "executable": False,
+                "location": "test-skill/SKILL.md",
+            }
+        ]
+
+    def test_scans_support_files_and_scripts_before_install(self, tmp_path, monkeypatch):
+        zip_path = tmp_path / "test-skill.skill"
+        with zipfile.ZipFile(zip_path, "w") as zf:
+            zf.writestr("test-skill/SKILL.md", "---\nname: test-skill\ndescription: A test skill\n---\n\n# test-skill\n")
+            zf.writestr("test-skill/references/guide.md", "# Guide\n")
+            zf.writestr("test-skill/templates/prompt.txt", "Use care.\n")
+            zf.writestr("test-skill/scripts/run.sh", "#!/bin/sh\necho ok\n")
+            zf.writestr("test-skill/assets/logo.png", b"\x89PNG\r\n\x1a\n")
+            zf.writestr("test-skill/references/.env", "TOKEN=secret\n")
+            zf.writestr("test-skill/templates/config.cfg", "TOKEN=secret\n")
+        skills_root = tmp_path / "skills"
+        skills_root.mkdir()
+        calls = []
+
+        async def _scan(content, *, executable, location):
+            calls.append({"content": content, "executable": executable, "location": location})
+            return ScanResult(decision="allow", reason="ok")
+
+        monkeypatch.setattr("deerflow.skills.installer.scan_skill_content", _scan)
+
+        get_or_new_skill_storage(skills_path=skills_root).install_skill_from_archive(zip_path)
+
+        assert calls == [
+            {
+                "content": "---\nname: test-skill\ndescription: A test skill\n---\n\n# test-skill\n",
+                "executable": False,
+                "location": "test-skill/SKILL.md",
+            },
+            {
+                "content": "# Guide\n",
+                "executable": False,
+                "location": "test-skill/references/guide.md",
+            },
+            {
+                "content": "#!/bin/sh\necho ok\n",
+                "executable": True,
+                "location": "test-skill/scripts/run.sh",
+            },
+            {
+                "content": "Use care.\n",
+                "executable": False,
+                "location": "test-skill/templates/prompt.txt",
+            },
+        ]
+        assert all("secret" not in call["content"] for call in calls)
+
+    def test_nested_skill_markdown_prevents_install(self, tmp_path):
+        zip_path = tmp_path / "test-skill.skill"
+        with zipfile.ZipFile(zip_path, "w") as zf:
+            zf.writestr("test-skill/SKILL.md", "---\nname: test-skill\ndescription: A test skill\n---\n\n# test-skill\n")
+            zf.writestr("test-skill/references/other/SKILL.md", "# Nested skill\n")
+        skills_root = tmp_path / "skills"
+        skills_root.mkdir()
+
+        with pytest.raises(SkillSecurityScanError, match="nested SKILL.md"):
+            get_or_new_skill_storage(skills_path=skills_root).install_skill_from_archive(zip_path)
+
+        assert not (skills_root / "custom" / "test-skill").exists()
+
+    def test_script_warn_prevents_install(self, tmp_path, monkeypatch):
+        zip_path = tmp_path / "test-skill.skill"
+        with zipfile.ZipFile(zip_path, "w") as zf:
+            zf.writestr("test-skill/SKILL.md", "---\nname: test-skill\ndescription: A test skill\n---\n\n# test-skill\n")
+            zf.writestr("test-skill/scripts/run.sh", "#!/bin/sh\necho ok\n")
+        skills_root = tmp_path / "skills"
+        skills_root.mkdir()
+
+        async def _scan(*args, executable, **kwargs):
+            if executable:
+                return ScanResult(decision="warn", reason="script needs review")
+            return ScanResult(decision="allow", reason="ok")
+
+        monkeypatch.setattr("deerflow.skills.installer.scan_skill_content", _scan)
+
+        with pytest.raises(SkillSecurityScanError, match="rejected executable.*script needs review"):
+            get_or_new_skill_storage(skills_path=skills_root).install_skill_from_archive(zip_path)
+
+        assert not (skills_root / "custom" / "test-skill").exists()
+
+    def test_security_scan_block_prevents_install(self, tmp_path, monkeypatch):
+        zip_path = self._make_skill_zip(tmp_path, skill_name="blocked-skill")
+        skills_root = tmp_path / "skills"
+        skills_root.mkdir()
+
+        async def _scan(*args, **kwargs):
+            return ScanResult(decision="block", reason="prompt injection")
+
+        monkeypatch.setattr("deerflow.skills.installer.scan_skill_content", _scan)
+
+        with pytest.raises(SkillSecurityScanError, match="Security scan blocked.*prompt injection"):
+            get_or_new_skill_storage(skills_path=skills_root).install_skill_from_archive(zip_path)
+
+        assert not (skills_root / "custom" / "blocked-skill").exists()
+
+    def test_copy_failure_does_not_leave_partial_install(self, tmp_path, monkeypatch):
+        zip_path = self._make_skill_zip(tmp_path)
+        skills_root = tmp_path / "skills"
+        skills_root.mkdir()
+
+        def _copytree(src, dst):
+            partial = Path(dst)
+            partial.mkdir(parents=True)
+            (partial / "partial.txt").write_text("partial", encoding="utf-8")
+            raise OSError("copy failed")
+
+        monkeypatch.setattr("deerflow.skills.installer.shutil.copytree", _copytree)
+
+        with pytest.raises(OSError, match="copy failed"):
+            get_or_new_skill_storage(skills_path=skills_root).install_skill_from_archive(zip_path)
+
+        custom_dir = skills_root / "custom"
+        assert not (custom_dir / "test-skill").exists()
+        assert not [path for path in custom_dir.iterdir() if path.name.startswith(".installing-test-skill-")]
+
+    def test_concurrent_target_creation_does_not_get_clobbered(self, tmp_path, monkeypatch):
+        zip_path = self._make_skill_zip(tmp_path)
+        skills_root = tmp_path / "skills"
+        skills_root.mkdir()
+        target = skills_root / "custom" / "test-skill"
+        original_copytree = shutil.copytree
+
+        def _copytree(src, dst):
+            target.mkdir(parents=True)
+            (target / "marker.txt").write_text("external", encoding="utf-8")
+            return original_copytree(src, dst)
+
+        monkeypatch.setattr("deerflow.skills.installer.shutil.copytree", _copytree)
+
+        with pytest.raises(ValueError, match="already exists"):
+            get_or_new_skill_storage(skills_path=skills_root).install_skill_from_archive(zip_path)
+
+        assert (target / "marker.txt").read_text(encoding="utf-8") == "external"
+        assert not (target / "SKILL.md").exists()
+
+    def test_move_failure_cleans_reserved_target(self, tmp_path, monkeypatch):
+        zip_path = self._make_skill_zip(tmp_path)
+        skills_root = tmp_path / "skills"
+        skills_root.mkdir()
+
+        def _move(src, dst):
+            Path(dst).write_text("partial", encoding="utf-8")
+            raise OSError("move failed")
+
+        monkeypatch.setattr("deerflow.skills.installer.shutil.move", _move)
+
+        with pytest.raises(OSError, match="move failed"):
+            get_or_new_skill_storage(skills_path=skills_root).install_skill_from_archive(zip_path)
+
+        assert not (skills_root / "custom" / "test-skill").exists()
+
     def test_duplicate_raises(self, tmp_path):
         zip_path = self._make_skill_zip(tmp_path)
         skills_root = tmp_path / "skills"
         (skills_root / "custom" / "test-skill").mkdir(parents=True)
         with pytest.raises(ValueError, match="already exists"):
-            install_skill_from_archive(zip_path, skills_root=skills_root)
+            get_or_new_skill_storage(skills_path=skills_root).install_skill_from_archive(zip_path)
 
     def test_invalid_extension(self, tmp_path):
         bad_path = tmp_path / "bad.zip"
         bad_path.write_text("not a skill")
         with pytest.raises(ValueError, match=".skill"):
-            install_skill_from_archive(bad_path)
+            get_or_new_skill_storage(skills_path=tmp_path).install_skill_from_archive(bad_path)
 
     def test_bad_frontmatter(self, tmp_path):
         zip_path = tmp_path / "bad.skill"
@@ -208,11 +390,11 @@ class TestInstallSkillFromArchive:
         skills_root = tmp_path / "skills"
         skills_root.mkdir()
         with pytest.raises(ValueError, match="Invalid skill"):
-            install_skill_from_archive(zip_path, skills_root=skills_root)
+            get_or_new_skill_storage(skills_path=skills_root).install_skill_from_archive(zip_path)
 
-    def test_nonexistent_file(self):
+    def test_nonexistent_file(self, tmp_path):
         with pytest.raises(FileNotFoundError):
-            install_skill_from_archive(Path("/nonexistent/path.skill"))
+            get_or_new_skill_storage(skills_path=tmp_path).install_skill_from_archive(Path("/nonexistent/path.skill"))
 
     def test_macosx_filtered_during_resolve(self, tmp_path):
         """Archive with __MACOSX dir still installs correctly."""
@@ -222,6 +404,6 @@ class TestInstallSkillFromArchive:
             zf.writestr("__MACOSX/._my-skill", "meta")
         skills_root = tmp_path / "skills"
         skills_root.mkdir()
-        result = install_skill_from_archive(zip_path, skills_root=skills_root)
+        result = get_or_new_skill_storage(skills_path=skills_root).install_skill_from_archive(zip_path)
         assert result["success"] is True
         assert result["skill_name"] == "my-skill"
diff --git a/backend/tests/test_skills_loader.py b/backend/tests/test_skills_loader.py
index 7d885444d..5a03532c6 100644
--- a/backend/tests/test_skills_loader.py
+++ b/backend/tests/test_skills_loader.py
@@ -1,8 +1,10 @@
 """Tests for recursive skills loading."""
 
 from pathlib import Path
+from types import SimpleNamespace
 
-from deerflow.skills.loader import get_skills_root_path, load_skills
+from deerflow.config.skills_config import SkillsConfig
+from deerflow.skills.storage import get_or_new_skill_storage
 
 
 def _write_skill(skill_dir: Path, name: str, description: str) -> None:
@@ -14,7 +16,8 @@ def _write_skill(skill_dir: Path, name: str, description: str) -> None:
 
 
 def test_get_skills_root_path_points_to_project_root_skills():
     """get_skills_root_path() should point to deer-flow/skills (sibling of backend/), not backend/packages/skills."""
-    path = get_skills_root_path()
+    app_config = SimpleNamespace(skills=SkillsConfig())
+    path = get_or_new_skill_storage(app_config=app_config).get_skills_root_path()
     assert path.name == "skills", f"Expected 'skills', got '{path.name}'"
     assert (path.parent / "backend").is_dir(), f"Expected skills path's parent to be project root containing 'backend/', but got {path}"
@@ -27,7 +30,7 @@ def test_load_skills_discovers_nested_skills_and_sets_container_paths(tmp_path: Path):
     _write_skill(skills_root / "public" / "parent" / "child-skill", "child-skill", "Child skill")
     _write_skill(skills_root / "custom" / "team" / "helper", "team-helper", "Team helper")
 
-    skills = load_skills(skills_path=skills_root, use_config=False, enabled_only=False)
+    skills = get_or_new_skill_storage(skills_path=skills_root).load_skills(enabled_only=False)
 
     by_name = {skill.name: skill for skill in skills}
     assert {"root-skill", "child-skill", "team-helper"} <= set(by_name)
@@ -57,7 +60,7 @@ def test_load_skills_skips_hidden_directories(tmp_path: Path):
         "Hidden skill",
     )
 
-    skills = load_skills(skills_path=skills_root, use_config=False, enabled_only=False)
+    skills = get_or_new_skill_storage(skills_path=skills_root).load_skills(enabled_only=False)
 
     names = {skill.name for skill in skills}
     assert "ok-skill" in names
@@ -69,7 +72,7 @@ def test_load_skills_prefers_custom_over_public_with_same_name(tmp_path: Path):
     _write_skill(skills_root / "public" / "shared-skill", "shared-skill", "Public version")
     _write_skill(skills_root / "custom" / "shared-skill", "shared-skill", "Custom version")
 
-    skills = load_skills(skills_path=skills_root, use_config=False, enabled_only=False)
+    skills = get_or_new_skill_storage(skills_path=skills_root).load_skills(enabled_only=False)
 
     shared = next(skill for skill in skills if skill.name == "shared-skill")
     assert shared.category == "custom"
diff --git a/backend/tests/test_subagent_executor.py b/backend/tests/test_subagent_executor.py
index a6a62c2b6..774bd2dd9 100644
--- a/backend/tests/test_subagent_executor.py
+++ b/backend/tests/test_subagent_executor.py
@@ -3,7 +3,7 @@
 Covers:
 - SubagentExecutor.execute() synchronous execution path
 - SubagentExecutor._aexecute() asynchronous execution path
-- asyncio.run() properly executes async workflow within thread pool context
+- execute_async() routes background work without bouncing through execute()
 - Error handling in both sync and async paths
 - Async tool support (MCP tools)
 - Cooperative cancellation via cancel_event
@@ -17,6 +17,7 @@
 import asyncio
 import sys
 import threading
 from datetime import datetime
+from types import ModuleType, SimpleNamespace
 from unittest.mock import MagicMock, patch
 
 import pytest
@@ -153,6 +154,13 @@ def mock_agent():
     return agent
+
+
+def _module(name: str, **attrs):
+    module = ModuleType(name)
+    for key, value in attrs.items():
+        setattr(module, key, value)
+    return module
 
 
 # Helper to create real message objects
 class _MsgHelper:
     """Helper to create real message objects from fixture classes."""
@@ -176,6 +184,88 @@ def msg(classes):
     return _MsgHelper(classes)
+
+
+# -----------------------------------------------------------------------------
+# Agent Construction Tests
+# -----------------------------------------------------------------------------
+
+
+class TestAgentConstruction:
before execution starts.""" + + def test_create_agent_threads_explicit_app_config_to_model_and_middlewares( + self, + classes, + base_config, + monkeypatch: pytest.MonkeyPatch, + ): + """Explicit app_config must flow into both model and middleware factories.""" + import deerflow.config as config_module + from deerflow.subagents import executor as executor_module + + SubagentExecutor = classes["SubagentExecutor"] + + app_config = object() + model = object() + middlewares = [object()] + agent = object() + captured: dict[str, dict] = {} + + def fake_get_app_config(): + raise AssertionError("ambient get_app_config() must not be used when app_config is explicit") + + def fake_create_chat_model(**kwargs): + captured["model"] = kwargs + return model + + def fake_build_subagent_runtime_middlewares(**kwargs): + captured["middlewares"] = kwargs + return middlewares + + def fake_create_agent(**kwargs): + captured["agent"] = kwargs + return agent + + monkeypatch.setattr(config_module, "get_app_config", fake_get_app_config) + monkeypatch.setattr( + executor_module, + "create_chat_model", + fake_create_chat_model, + ) + monkeypatch.setattr(executor_module, "create_agent", fake_create_agent) + monkeypatch.setitem( + sys.modules, + "deerflow.agents.middlewares.tool_error_handling_middleware", + _module( + "deerflow.agents.middlewares.tool_error_handling_middleware", + build_subagent_runtime_middlewares=fake_build_subagent_runtime_middlewares, + ), + ) + + executor = SubagentExecutor( + config=base_config, + tools=[], + app_config=app_config, + parent_model="parent-model", + ) + + result = executor._create_agent() + + assert result is agent + assert captured["model"] == { + "name": "parent-model", + "thinking_enabled": False, + "app_config": app_config, + } + assert captured["middlewares"] == { + "app_config": app_config, + "lazy_init": True, + } + assert captured["agent"]["model"] is model + assert captured["agent"]["middleware"] is middlewares + assert captured["agent"]["tools"] == [] + assert captured["agent"]["system_prompt"] == base_config.system_prompt + + # ----------------------------------------------------------------------------- # Async Execution Path Tests # ----------------------------------------------------------------------------- @@ -397,7 +487,7 @@ class TestSyncExecutionPath: """Test that execute() works correctly when called from a thread pool. This simulates the real-world usage where execute() is called from - _execution_pool in execute_async(). + a worker thread outside the main event loop. """ from concurrent.futures import ThreadPoolExecutor @@ -425,7 +515,7 @@ class TestSyncExecutionPath: with patch.object(executor, "_create_agent", return_value=mock_agent): return executor.execute("Task") - # Execute in thread pool (simulating _execution_pool usage) + # Execute in thread pool to simulate sync execution outside the main loop. 
with ThreadPoolExecutor(max_workers=1) as pool: future = pool.submit(run_in_thread) result = future.result(timeout=5) @@ -434,12 +524,21 @@ class TestSyncExecutionPath: assert result.result == "Thread pool result" @pytest.mark.anyio - async def test_execute_in_running_event_loop_uses_isolated_thread(self, classes, base_config, mock_agent, msg): - """Test that execute() uses the isolated-thread path inside a running loop.""" + async def test_execute_in_running_event_loop_calls_isolated_loop_directly(self, classes, base_config, mock_agent, msg): + """Test that execute() calls the isolated-loop helper directly in a running loop.""" + from deerflow.runtime.user_context import ( + get_effective_user_id, + reset_current_user, + set_current_user, + ) + SubagentExecutor = classes["SubagentExecutor"] SubagentStatus = classes["SubagentStatus"] + caller_thread = threading.current_thread().name + isolated_helper_threads = [] execution_threads = [] + effective_user_ids = [] final_state = { "messages": [ msg.human("Task"), @@ -449,6 +548,55 @@ class TestSyncExecutionPath: async def mock_astream(*args, **kwargs): execution_threads.append(threading.current_thread().name) + effective_user_ids.append(get_effective_user_id()) + yield final_state + + mock_agent.astream = mock_astream + + executor = SubagentExecutor( + config=base_config, + tools=[], + thread_id="test-thread", + ) + + original_isolated_execute = executor._execute_in_isolated_loop + + def tracked_isolated_execute(task, result_holder=None): + isolated_helper_threads.append(threading.current_thread().name) + return original_isolated_execute(task, result_holder) + + token = set_current_user(SimpleNamespace(id="alice")) + try: + with patch.object(executor, "_create_agent", return_value=mock_agent): + with patch.object(executor, "_execute_in_isolated_loop", side_effect=tracked_isolated_execute) as isolated: + result = executor.execute("Task") + finally: + reset_current_user(token) + + assert isolated.call_count == 1 + assert isolated_helper_threads == [caller_thread] + assert execution_threads + assert execution_threads == ["subagent-persistent-loop"] + assert effective_user_ids == ["alice"] + assert result.status == SubagentStatus.COMPLETED + assert result.result == "Async loop result" + + @pytest.mark.anyio + async def test_execute_in_running_event_loop_reuses_persistent_isolated_loop(self, classes, base_config, mock_agent, msg): + """Regression: repeated isolated executions should reuse one long-lived loop.""" + SubagentExecutor = classes["SubagentExecutor"] + SubagentStatus = classes["SubagentStatus"] + execution_loops = [] + + final_state = { + "messages": [ + msg.human("Task"), + msg.ai("Async loop result", "msg-1"), + ] + } + + async def mock_astream(*args, **kwargs): + execution_loops.append(asyncio.get_running_loop()) yield final_state mock_agent.astream = mock_astream @@ -460,14 +608,14 @@ class TestSyncExecutionPath: ) with patch.object(executor, "_create_agent", return_value=mock_agent): - with patch.object(executor, "_execute_in_isolated_loop", wraps=executor._execute_in_isolated_loop) as isolated: - result = executor.execute("Task") + first = executor.execute("Task 1") + second = executor.execute("Task 2") - assert isolated.call_count == 1 - assert execution_threads - assert all(name.startswith("subagent-isolated-") for name in execution_threads) - assert result.status == SubagentStatus.COMPLETED - assert result.result == "Async loop result" + assert first.status == SubagentStatus.COMPLETED + assert second.status == 
SubagentStatus.COMPLETED + assert len(execution_loops) == 2 + assert execution_loops[0] is execution_loops[1] + assert execution_loops[0].is_running() def test_execute_handles_asyncio_run_failure(self, classes, base_config): """Test handling when asyncio.run() itself fails.""" @@ -932,6 +1080,100 @@ class TestCooperativeCancellation: """Test that requesting cancellation on a nonexistent task does not raise.""" executor_module.request_cancel_background_task("nonexistent-task") + def test_execute_async_runs_without_calling_execute(self, executor_module, classes, base_config): + """Regression: execute_async should not route through execute()/asyncio.run().""" + import concurrent.futures + + SubagentExecutor = classes["SubagentExecutor"] + SubagentResult = classes["SubagentResult"] + SubagentStatus = classes["SubagentStatus"] + + def run_inline(fn, *args, **kwargs): + future = concurrent.futures.Future() + try: + future.set_result(fn(*args, **kwargs)) + except Exception as exc: + future.set_exception(exc) + return future + + async def fake_aexecute(task, result_holder=None): + result = result_holder or SubagentResult( + task_id="inline-task", + trace_id="test-trace", + status=SubagentStatus.RUNNING, + ) + result.status = SubagentStatus.COMPLETED + result.result = f"done: {task}" + result.completed_at = datetime.now() + return result + + executor = SubagentExecutor( + config=base_config, + tools=[], + thread_id="test-thread", + trace_id="test-trace", + ) + + with ( + patch.object(executor_module._scheduler_pool, "submit", side_effect=run_inline), + patch.object(executor, "_aexecute", side_effect=fake_aexecute), + patch.object(executor, "execute", side_effect=AssertionError("execute() should not be called by execute_async")), + ): + task_id = executor.execute_async("Task") + + result = executor_module._background_tasks.get(task_id) + assert result is not None + assert result.status == SubagentStatus.COMPLETED + assert result.result == "done: Task" + assert result.error is None + + def test_execute_async_propagates_user_context_to_isolated_loop(self, executor_module, classes, base_config): + """Regression: background subagent execution must keep request user context.""" + import concurrent.futures + + from deerflow.runtime.user_context import ( + get_effective_user_id, + reset_current_user, + set_current_user, + ) + + SubagentExecutor = classes["SubagentExecutor"] + SubagentStatus = classes["SubagentStatus"] + + async def fake_aexecute(task, result_holder=None): + result = result_holder + result.status = SubagentStatus.COMPLETED + result.result = get_effective_user_id() + result.completed_at = datetime.now() + return result + + executor = SubagentExecutor( + config=base_config, + tools=[], + thread_id="test-thread", + trace_id="test-trace", + ) + + scheduler = concurrent.futures.ThreadPoolExecutor(max_workers=1) + token = set_current_user(SimpleNamespace(id="alice")) + try: + with ( + patch.object(executor_module, "_scheduler_pool", scheduler), + patch.object(executor, "_aexecute", side_effect=fake_aexecute), + patch.object(executor, "execute", side_effect=AssertionError("execute() should not be called by execute_async")), + ): + task_id = executor.execute_async("Task") + executor_module._scheduler_pool.shutdown(wait=True) + finally: + reset_current_user(token) + scheduler.shutdown(wait=False, cancel_futures=True) + + result = executor_module._background_tasks.get(task_id) + assert result is not None + assert result.status == SubagentStatus.COMPLETED + assert result.result == "alice" + assert 
result.error is None + def test_timeout_does_not_overwrite_cancelled(self, executor_module, classes, base_config, msg): """Test that the real timeout handler does not overwrite CANCELLED status. @@ -953,25 +1195,13 @@ class TestCooperativeCancellation: ) # Synchronisation primitives - execute_entered = threading.Event() # signals that execute() has started - execute_release = threading.Event() # lets execute() return + execute_entered = threading.Event() # signals that _aexecute() has started run_task_done = threading.Event() # signals that run_task() has finished - # A blocking execute() replacement so we control the timing exactly - def blocking_execute(task, result_holder=None): - # Cooperative cancellation: honour cancel_event like real _aexecute - if result_holder and result_holder.cancel_event.is_set(): - result_holder.status = SubagentStatus.CANCELLED - result_holder.error = "Cancelled by user" - result_holder.completed_at = datetime.now() - execute_entered.set() - return result_holder + # A blocking _aexecute() replacement so we control the timing exactly. + async def blocking_aexecute(task, result_holder=None): execute_entered.set() - execute_release.wait(timeout=5) - # Return a minimal completed result (will be ignored because timeout fires first) - from deerflow.subagents.executor import SubagentResult as _R - - return _R(task_id="x", trace_id="t", status=SubagentStatus.COMPLETED, result="late") + await asyncio.Event().wait() executor = SubagentExecutor( config=short_config, @@ -992,11 +1222,11 @@ class TestCooperativeCancellation: return original_scheduler_submit(wrapper) - with patch.object(executor, "execute", blocking_execute), patch.object(executor_module._scheduler_pool, "submit", tracked_submit): + with patch.object(executor, "_aexecute", side_effect=blocking_aexecute), patch.object(executor_module._scheduler_pool, "submit", tracked_submit): task_id = executor.execute_async("Task") - # Wait until execute() is entered (i.e. it's running in _execution_pool) - assert execute_entered.wait(timeout=3), "execute() was never called" + # Wait until _aexecute() is entered on the persistent loop. + assert execute_entered.wait(timeout=3), "_aexecute() was never called" # Set CANCELLED on the result before the timeout handler runs. # The 50ms timeout will fire while execute() is blocked. @@ -1009,11 +1239,6 @@ class TestCooperativeCancellation: # now executed and (should have) left CANCELLED intact. assert run_task_done.wait(timeout=5), "run_task() did not finish" - # Only NOW release the blocked execute() so the thread pool worker - # can be reclaimed. This MUST come after run_task_done to avoid a - # race where execute() returns before the timeout fires. 
-            execute_release.set()
-
         result = executor_module._background_tasks.get(task_id)
         assert result is not None
         # The RUNNING guard in the FuturesTimeoutError handler must have
diff --git a/backend/tests/test_subagent_prompt_security.py b/backend/tests/test_subagent_prompt_security.py
index d0e5a949f..015206877 100644
--- a/backend/tests/test_subagent_prompt_security.py
+++ b/backend/tests/test_subagent_prompt_security.py
@@ -25,7 +25,9 @@ def test_build_subagent_section_hides_bash_examples_when_unavailable(monkeypatch
 
     section = prompt_module._build_subagent_section(3)
 
-    assert "Not available in the current sandbox configuration" in section
+    # When bash is not available, it should not appear at all (aligned with Codex:
+    # unavailable roles are omitted, not listed as disabled)
+    assert "**bash**" not in section
     assert 'bash("npm test")' not in section
     assert 'read_file("/mnt/user-data/workspace/README.md")' in section
     assert "available tools (ls, read_file, web_search, etc.)" in section
diff --git a/backend/tests/test_subagent_skills_config.py b/backend/tests/test_subagent_skills_config.py
new file mode 100644
index 000000000..f121ccf25
--- /dev/null
+++ b/backend/tests/test_subagent_skills_config.py
@@ -0,0 +1,596 @@
+"""Tests for subagent per-agent skill configuration and custom subagent types.
+
+Covers:
+- SubagentConfig.skills field
+- SubagentOverrideConfig.skills field
+- CustomSubagentConfig model validation
+- SubagentsAppConfig.custom_agents and get_skills_for()
+- Registry: custom agent lookup, skills override, merged available names
+- Skills filter passthrough in task_tool config assembly
+"""
+
+import pytest
+
+from deerflow.config.subagents_config import (
+    CustomSubagentConfig,
+    SubagentOverrideConfig,
+    SubagentsAppConfig,
+    get_subagents_app_config,
+    load_subagents_config_from_dict,
+)
+from deerflow.subagents.config import SubagentConfig
+
+# ---------------------------------------------------------------------------
+# Helpers
+# ---------------------------------------------------------------------------
+
+
+def _reset_subagents_config(**kwargs) -> None:
+    """Reset global subagents config to a known state."""
+    load_subagents_config_from_dict(kwargs)
+
+
+# ---------------------------------------------------------------------------
+# SubagentConfig.skills field
+# ---------------------------------------------------------------------------
+
+
+class TestSubagentConfigSkills:
+    def test_default_skills_is_none(self):
+        config = SubagentConfig(name="test", description="test", system_prompt="test")
+        assert config.skills is None
+
+    def test_skills_whitelist(self):
+        config = SubagentConfig(
+            name="test",
+            description="test",
+            system_prompt="test",
+            skills=["data-analysis", "visualization"],
+        )
+        assert config.skills == ["data-analysis", "visualization"]
+
+    def test_skills_empty_list_means_no_skills(self):
+        config = SubagentConfig(
+            name="test",
+            description="test",
+            system_prompt="test",
+            skills=[],
+        )
+        assert config.skills == []
+
+
+# ---------------------------------------------------------------------------
+# SubagentOverrideConfig.skills field
+# ---------------------------------------------------------------------------
+
+
+class TestSubagentOverrideConfigSkills:
+    def test_default_skills_is_none(self):
+        override = SubagentOverrideConfig()
+        assert override.skills is None
+
+    def test_skills_whitelist(self):
+        override = SubagentOverrideConfig(skills=["web-search", "data-analysis"])
+        assert override.skills == ["web-search", "data-analysis"]
+
+    def test_skills_empty_list(self):
+        override = SubagentOverrideConfig(skills=[])
+        assert override.skills == []
+
+    def test_skills_coexists_with_other_fields(self):
+        override = SubagentOverrideConfig(
+            timeout_seconds=300,
+            model="gpt-5",
+            skills=["my-skill"],
+        )
+        assert override.timeout_seconds == 300
+        assert override.model == "gpt-5"
+        assert override.skills == ["my-skill"]
+
+
+# ---------------------------------------------------------------------------
+# CustomSubagentConfig model
+# ---------------------------------------------------------------------------
+
+
+class TestCustomSubagentConfig:
+    def test_minimal_valid(self):
+        config = CustomSubagentConfig(
+            description="A test agent",
+            system_prompt="You are a test agent.",
+        )
+        assert config.description == "A test agent"
+        assert config.system_prompt == "You are a test agent."
+        assert config.tools is None
+        assert config.disallowed_tools == ["task", "ask_clarification", "present_files"]
+        assert config.skills is None
+        assert config.model == "inherit"
+        assert config.max_turns == 50
+        assert config.timeout_seconds == 900
+
+    def test_full_configuration(self):
+        config = CustomSubagentConfig(
+            description="Data analysis specialist",
+            system_prompt="You are a data analysis subagent.",
+            tools=["bash", "read_file", "write_file"],
+            disallowed_tools=["task"],
+            skills=["data-analysis", "visualization"],
+            model="qwen3:32b",
+            max_turns=80,
+            timeout_seconds=600,
+        )
+        assert config.tools == ["bash", "read_file", "write_file"]
+        assert config.skills == ["data-analysis", "visualization"]
+        assert config.model == "qwen3:32b"
+        assert config.max_turns == 80
+        assert config.timeout_seconds == 600
+
+    def test_skills_empty_list_no_skills(self):
+        config = CustomSubagentConfig(
+            description="test",
+            system_prompt="test",
+            skills=[],
+        )
+        assert config.skills == []
+
+    def test_rejects_zero_max_turns(self):
+        with pytest.raises(ValueError):
+            CustomSubagentConfig(
+                description="test",
+                system_prompt="test",
+                max_turns=0,
+            )
+
+    def test_rejects_zero_timeout(self):
+        with pytest.raises(ValueError):
+            CustomSubagentConfig(
+                description="test",
+                system_prompt="test",
+                timeout_seconds=0,
+            )
+
+
+# ---------------------------------------------------------------------------
+# SubagentsAppConfig.custom_agents and get_skills_for()
+# ---------------------------------------------------------------------------
+
+
+class TestSubagentsAppConfigCustomAgents:
+    def test_default_custom_agents_empty(self):
+        config = SubagentsAppConfig()
+        assert config.custom_agents == {}
+
+    def test_custom_agents_loaded(self):
+        config = SubagentsAppConfig(
+            custom_agents={
+                "analysis": CustomSubagentConfig(
+                    description="Analysis agent",
+                    system_prompt="You analyze data.",
+                    skills=["data-analysis"],
+                ),
+            }
+        )
+        assert "analysis" in config.custom_agents
+        assert config.custom_agents["analysis"].skills == ["data-analysis"]
+
+    def test_multiple_custom_agents(self):
+        config = SubagentsAppConfig(
+            custom_agents={
+                "analysis": CustomSubagentConfig(
+                    description="Analysis",
+                    system_prompt="analyze",
+                    skills=["data-analysis"],
+                ),
+                "researcher": CustomSubagentConfig(
+                    description="Research",
+                    system_prompt="research",
+                    skills=["web-search"],
+                ),
+            }
+        )
+        assert len(config.custom_agents) == 2
+
+
+class TestGetSkillsFor:
+    def test_returns_none_when_no_override(self):
+        config = SubagentsAppConfig()
+        assert config.get_skills_for("general-purpose") is None
+        assert config.get_skills_for("unknown") is None
+
+    def test_returns_skills_whitelist(self):
+        config = SubagentsAppConfig(
+            agents={
+                "general-purpose": SubagentOverrideConfig(skills=["web-search", "coding"]),
+            }
+        )
+        assert config.get_skills_for("general-purpose") == ["web-search", "coding"]
+
+    def test_returns_empty_list_for_no_skills(self):
+        config = SubagentsAppConfig(
+            agents={
+                "bash": SubagentOverrideConfig(skills=[]),
+            }
+        )
+        assert config.get_skills_for("bash") == []
+
+    def test_returns_none_for_unrelated_agent(self):
+        config = SubagentsAppConfig(
+            agents={
+                "bash": SubagentOverrideConfig(skills=["web-search"]),
+            }
+        )
+        assert config.get_skills_for("general-purpose") is None
+
+    def test_returns_none_when_skills_not_set(self):
+        config = SubagentsAppConfig(
+            agents={
+                "bash": SubagentOverrideConfig(timeout_seconds=300),
+            }
+        )
+        assert config.get_skills_for("bash") is None
+
+
+# ---------------------------------------------------------------------------
+# load_subagents_config_from_dict with skills and custom_agents
+# ---------------------------------------------------------------------------
+
+
+class TestLoadSubagentsConfigWithSkills:
+    def teardown_method(self):
+        _reset_subagents_config()
+
+    def test_load_with_skills_override(self):
+        load_subagents_config_from_dict(
+            {
+                "timeout_seconds": 900,
+                "agents": {
+                    "general-purpose": {"skills": ["web-search", "data-analysis"]},
+                },
+            }
+        )
+        cfg = get_subagents_app_config()
+        assert cfg.get_skills_for("general-purpose") == ["web-search", "data-analysis"]
+
+    def test_load_with_empty_skills(self):
+        load_subagents_config_from_dict(
+            {
+                "timeout_seconds": 900,
+                "agents": {
+                    "bash": {"skills": []},
+                },
+            }
+        )
+        cfg = get_subagents_app_config()
+        assert cfg.get_skills_for("bash") == []
+
+    def test_load_with_custom_agents(self):
+        load_subagents_config_from_dict(
+            {
+                "timeout_seconds": 900,
+                "custom_agents": {
+                    "analysis": {
+                        "description": "Data analysis specialist",
+                        "system_prompt": "You are a data analysis subagent.",
+                        "skills": ["data-analysis", "visualization"],
+                        "tools": ["bash", "read_file"],
+                        "max_turns": 80,
+                        "timeout_seconds": 600,
+                    },
+                },
+            }
+        )
+        cfg = get_subagents_app_config()
+        assert "analysis" in cfg.custom_agents
+        custom = cfg.custom_agents["analysis"]
+        assert custom.skills == ["data-analysis", "visualization"]
+        assert custom.tools == ["bash", "read_file"]
+        assert custom.max_turns == 80
+        assert custom.timeout_seconds == 600
+
+    def test_load_with_both_overrides_and_custom(self):
+        load_subagents_config_from_dict(
+            {
+                "timeout_seconds": 900,
+                "agents": {
+                    "general-purpose": {"skills": ["web-search"]},
+                },
+                "custom_agents": {
+                    "analysis": {
+                        "description": "Analysis",
+                        "system_prompt": "Analyze.",
+                        "skills": ["data-analysis"],
+                    },
+                },
+            }
+        )
+        cfg = get_subagents_app_config()
+        assert cfg.get_skills_for("general-purpose") == ["web-search"]
+        assert cfg.custom_agents["analysis"].skills == ["data-analysis"]
+
+
+# ---------------------------------------------------------------------------
+# Registry: custom agent lookup
+# ---------------------------------------------------------------------------
+
+
+class TestRegistryCustomAgentLookup:
+    def teardown_method(self):
+        _reset_subagents_config()
+
+    def test_custom_agent_found(self):
+        from deerflow.subagents.registry import get_subagent_config
+
+        load_subagents_config_from_dict(
+            {
+                "custom_agents": {
+                    "analysis": {
+                        "description": "Data analysis specialist",
+                        "system_prompt": "You are a data analysis subagent.",
+                        "skills": ["data-analysis"],
+                        "tools": ["bash", "read_file"],
+                        "max_turns": 80,
+                        "timeout_seconds": 600,
+                    },
+                },
+            }
+        )
+        config = get_subagent_config("analysis")
+        assert config is not None
+        assert config.name == "analysis"
+        assert config.skills == ["data-analysis"]
+        assert config.tools == ["bash", "read_file"]
+        assert config.max_turns == 80
+        assert config.timeout_seconds == 600
+        assert config.model == "inherit"
+
+    def test_custom_agent_not_found(self):
+        from deerflow.subagents.registry import get_subagent_config
+
+        _reset_subagents_config()
+        assert get_subagent_config("nonexistent") is None
+
+    def test_builtin_takes_priority_over_custom(self):
+        """If a custom agent has the same name as a builtin, builtin wins."""
+        from deerflow.subagents.builtins import BUILTIN_SUBAGENTS
+        from deerflow.subagents.registry import get_subagent_config
+
+        load_subagents_config_from_dict(
+            {
+                "custom_agents": {
+                    "general-purpose": {
+                        "description": "Custom override attempt",
+                        "system_prompt": "Should not be used",
+                    },
+                },
+            }
+        )
+        config = get_subagent_config("general-purpose")
+        # Should get the builtin description, not the custom one
+        assert config.description == BUILTIN_SUBAGENTS["general-purpose"].description
+
+    def test_custom_agent_with_override(self):
+        """Per-agent overrides also apply to custom agents."""
+        from deerflow.subagents.registry import get_subagent_config
+
+        load_subagents_config_from_dict(
+            {
+                "custom_agents": {
+                    "analysis": {
+                        "description": "Analysis",
+                        "system_prompt": "Analyze.",
+                        "timeout_seconds": 600,
+                    },
+                },
+                "agents": {
+                    "analysis": {"timeout_seconds": 300, "skills": ["overridden-skill"]},
+                },
+            }
+        )
+        config = get_subagent_config("analysis")
+        assert config is not None
+        assert config.timeout_seconds == 300  # Override applied
+        assert config.skills == ["overridden-skill"]  # Override applied
+
+
+# ---------------------------------------------------------------------------
+# Registry: skills override on builtin agents
+# ---------------------------------------------------------------------------
+
+
+class TestRegistrySkillsOverride:
+    def teardown_method(self):
+        _reset_subagents_config()
+
+    def test_skills_override_applied_to_builtin(self):
+        from deerflow.subagents.registry import get_subagent_config
+
+        load_subagents_config_from_dict(
+            {
+                "agents": {
+                    "general-purpose": {"skills": ["web-search", "data-analysis"]},
+                },
+            }
+        )
+        config = get_subagent_config("general-purpose")
+        assert config.skills == ["web-search", "data-analysis"]
+
+    def test_empty_skills_override(self):
+        from deerflow.subagents.registry import get_subagent_config
+
+        load_subagents_config_from_dict(
+            {
+                "agents": {
+                    "bash": {"skills": []},
+                },
+            }
+        )
+        config = get_subagent_config("bash")
+        assert config.skills == []
+
+    def test_no_skills_override_keeps_default(self):
+        from deerflow.subagents.registry import get_subagent_config
+
+        _reset_subagents_config()
+        config = get_subagent_config("general-purpose")
+        assert config.skills is None  # Default: inherit all
+
+    def test_skills_override_does_not_mutate_builtin(self):
+        from deerflow.subagents.builtins import BUILTIN_SUBAGENTS
+        from deerflow.subagents.registry import get_subagent_config
+
+        load_subagents_config_from_dict(
+            {
+                "agents": {
+                    "general-purpose": {"skills": ["web-search"]},
+                },
+            }
+        )
+        _ = get_subagent_config("general-purpose")
+        assert BUILTIN_SUBAGENTS["general-purpose"].skills is None
+
+
+# ---------------------------------------------------------------------------
+# Registry: get_available_subagent_names merges custom types
+# ---------------------------------------------------------------------------
+
+
+class TestRegistryAvailableNames:
+    def teardown_method(self):
+        _reset_subagents_config()
+
+    def test_includes_builtin_names(self):
+        from deerflow.subagents.registry import get_subagent_names
+
+        _reset_subagents_config()
+        names = get_subagent_names()
+        assert "general-purpose" in names
+        assert "bash" in names
+
+    def test_includes_custom_names(self):
+        from deerflow.subagents.registry import get_subagent_names
+
+        load_subagents_config_from_dict(
+            {
+                "custom_agents": {
+                    "analysis": {
+                        "description": "Analysis",
+                        "system_prompt": "Analyze.",
+                    },
+                    "researcher": {
+                        "description": "Research",
+                        "system_prompt": "Research.",
+                    },
+                },
+            }
+        )
+        names = get_subagent_names()
+        assert "general-purpose" in names
+        assert "bash" in names
+        assert "analysis" in names
+        assert "researcher" in names
+
+    def test_no_duplicates_when_custom_name_matches_builtin(self):
+        from deerflow.subagents.registry import get_subagent_names
+
+        load_subagents_config_from_dict(
+            {
+                "custom_agents": {
+                    "general-purpose": {
+                        "description": "Duplicate name",
+                        "system_prompt": "test",
+                    },
+                },
+            }
+        )
+        names = get_subagent_names()
+        assert names.count("general-purpose") == 1
+
+
+# ---------------------------------------------------------------------------
+# Registry: list_subagents includes custom agents
+# ---------------------------------------------------------------------------
+
+
+class TestRegistryListSubagentsWithCustom:
+    def teardown_method(self):
+        _reset_subagents_config()
+
+    def test_list_includes_custom_agents(self):
+        from deerflow.subagents.registry import list_subagents
+
+        load_subagents_config_from_dict(
+            {
+                "custom_agents": {
+                    "analysis": {
+                        "description": "Analysis",
+                        "system_prompt": "Analyze.",
+                        "skills": ["data-analysis"],
+                    },
+                },
+            }
+        )
+        configs = list_subagents()
+        names = {c.name for c in configs}
+        assert "general-purpose" in names
+        assert "bash" in names
+        assert "analysis" in names
+
+    def test_list_custom_agent_has_correct_skills(self):
+        from deerflow.subagents.registry import list_subagents
+
+        load_subagents_config_from_dict(
+            {
+                "custom_agents": {
+                    "analysis": {
+                        "description": "Analysis",
+                        "system_prompt": "Analyze.",
+                        "skills": ["data-analysis", "visualization"],
+                    },
+                },
+            }
+        )
+        by_name = {c.name: c for c in list_subagents()}
+        assert by_name["analysis"].skills == ["data-analysis", "visualization"]
+
+
+# ---------------------------------------------------------------------------
+# Skills filter passthrough: verify config.skills is used in task_tool assembly
+# ---------------------------------------------------------------------------
+
+
+class TestSkillsFilterPassthrough:
+    """Test that SubagentConfig.skills is correctly passed to get_skills_prompt_section."""
+
+    def test_none_skills_passes_none_to_prompt(self):
+        """When config.skills is None, available_skills=None should be passed (inherit all)."""
+        config = SubagentConfig(
+            name="test",
+            description="test",
+            system_prompt="test",
+            skills=None,
+        )
+        # Verify: set(None) would raise, so the code must check for None first
+        available = set(config.skills) if config.skills is not None else None
+        assert available is None
+
+    def test_empty_skills_passes_empty_set(self):
+        """When config.skills is [], available_skills=set() should be passed (no skills)."""
+        config = SubagentConfig(
+            name="test",
+            description="test",
+            system_prompt="test",
+            skills=[],
+        )
+        available = set(config.skills) if config.skills is not None else None
set(config.skills) if config.skills is not None else None + assert available == set() + + def test_skills_whitelist_passes_correct_set(self): + """When config.skills has values, those should be passed as available_skills.""" + config = SubagentConfig( + name="test", + description="test", + system_prompt="test", + skills=["data-analysis", "web-search"], + ) + available = set(config.skills) if config.skills is not None else None + assert available == {"data-analysis", "web-search"} diff --git a/backend/tests/test_suggestions_router.py b/backend/tests/test_suggestions_router.py index fee07dd44..0058e4588 100644 --- a/backend/tests/test_suggestions_router.py +++ b/backend/tests/test_suggestions_router.py @@ -1,4 +1,5 @@ import asyncio +from types import SimpleNamespace from unittest.mock import AsyncMock, MagicMock from app.gateway.routers import suggestions @@ -46,9 +47,13 @@ def test_generate_suggestions_parses_and_limits(monkeypatch): fake_model.ainvoke = AsyncMock(return_value=MagicMock(content='```json\n["Q1", "Q2", "Q3", "Q4"]\n```')) monkeypatch.setattr(suggestions, "create_chat_model", lambda **kwargs: fake_model) - result = asyncio.run(suggestions.generate_suggestions("t1", req)) + # Bypass the require_permission decorator (which needs request + + # thread_store) — these tests cover the parsing logic. + result = asyncio.run(suggestions.generate_suggestions.__wrapped__("t1", req, request=None, config=SimpleNamespace())) assert result.suggestions == ["Q1", "Q2", "Q3"] + fake_model.ainvoke.assert_awaited_once() + assert fake_model.ainvoke.await_args.kwargs["config"] == {"run_name": "suggest_agent"} def test_generate_suggestions_parses_list_block_content(monkeypatch): @@ -64,9 +69,13 @@ def test_generate_suggestions_parses_list_block_content(monkeypatch): fake_model.ainvoke = AsyncMock(return_value=MagicMock(content=[{"type": "text", "text": '```json\n["Q1", "Q2"]\n```'}])) monkeypatch.setattr(suggestions, "create_chat_model", lambda **kwargs: fake_model) - result = asyncio.run(suggestions.generate_suggestions("t1", req)) + # Bypass the require_permission decorator (which needs request + + # thread_store) — these tests cover the parsing logic. + result = asyncio.run(suggestions.generate_suggestions.__wrapped__("t1", req, request=None, config=SimpleNamespace())) assert result.suggestions == ["Q1", "Q2"] + fake_model.ainvoke.assert_awaited_once() + assert fake_model.ainvoke.await_args.kwargs["config"] == {"run_name": "suggest_agent"} def test_generate_suggestions_parses_output_text_block_content(monkeypatch): @@ -82,9 +91,13 @@ def test_generate_suggestions_parses_output_text_block_content(monkeypatch): fake_model.ainvoke = AsyncMock(return_value=MagicMock(content=[{"type": "output_text", "text": '```json\n["Q1", "Q2"]\n```'}])) monkeypatch.setattr(suggestions, "create_chat_model", lambda **kwargs: fake_model) - result = asyncio.run(suggestions.generate_suggestions("t1", req)) + # Bypass the require_permission decorator (which needs request + + # thread_store) — these tests cover the parsing logic. 
+    result = asyncio.run(suggestions.generate_suggestions.__wrapped__("t1", req, request=None, config=SimpleNamespace()))
 
     assert result.suggestions == ["Q1", "Q2"]
+    fake_model.ainvoke.assert_awaited_once()
+    assert fake_model.ainvoke.await_args.kwargs["config"] == {"run_name": "suggest_agent"}
 
 
 def test_generate_suggestions_returns_empty_on_model_error(monkeypatch):
@@ -97,6 +110,8 @@
     fake_model.ainvoke = AsyncMock(side_effect=RuntimeError("boom"))
     monkeypatch.setattr(suggestions, "create_chat_model", lambda **kwargs: fake_model)
 
-    result = asyncio.run(suggestions.generate_suggestions("t1", req))
+    # Bypass the require_permission decorator (which needs request +
+    # thread_store) — this test covers the error-handling path.
+    result = asyncio.run(suggestions.generate_suggestions.__wrapped__("t1", req, request=None, config=SimpleNamespace()))
 
     assert result.suggestions == []
diff --git a/backend/tests/test_summarization_middleware.py b/backend/tests/test_summarization_middleware.py
index d327c94c4..79ca8b01c 100644
--- a/backend/tests/test_summarization_middleware.py
+++ b/backend/tests/test_summarization_middleware.py
@@ -4,7 +4,7 @@ from types import SimpleNamespace
 from unittest.mock import MagicMock
 
 import pytest
-from langchain_core.messages import AIMessage, HumanMessage, RemoveMessage
+from langchain_core.messages import AIMessage, HumanMessage, RemoveMessage, ToolMessage
 
 from deerflow.agents.memory.summarization_hook import memory_flush_hook
 from deerflow.agents.middlewares.summarization_middleware import DeerFlowSummarizationMiddleware, SummarizationEvent
@@ -29,7 +29,16 @@ def _runtime(thread_id: str | None = "thread-1", agent_name: str | None = None)
     return SimpleNamespace(context=context)
 
 
-def _middleware(*, before_summarization=None, trigger=("messages", 4), keep=("messages", 2)) -> DeerFlowSummarizationMiddleware:
+def _middleware(
+    *,
+    before_summarization=None,
+    trigger=("messages", 4),
+    keep=("messages", 2),
+    skill_file_read_tool_names=None,
+    preserve_recent_skill_count: int = 0,
+    preserve_recent_skill_tokens: int = 0,
+    preserve_recent_skill_tokens_per_skill: int = 0,
+) -> DeerFlowSummarizationMiddleware:
     model = MagicMock()
     model.invoke.return_value = SimpleNamespace(text="compressed summary")
     return DeerFlowSummarizationMiddleware(
@@ -38,9 +47,34 @@ def _middleware(*, before_summarization=None, trigger=("messages", 4), keep=("me
         keep=keep,
         token_counter=len,
         before_summarization=before_summarization,
+        skill_file_read_tool_names=skill_file_read_tool_names,
+        preserve_recent_skill_count=preserve_recent_skill_count,
+        preserve_recent_skill_tokens=preserve_recent_skill_tokens,
+        preserve_recent_skill_tokens_per_skill=preserve_recent_skill_tokens_per_skill,
     )
 
 
+def _skill_read_call(tool_id: str, skill: str) -> dict:
+    return {
+        "name": "read_file",
+        "id": tool_id,
+        "args": {"path": f"/mnt/skills/public/{skill}/SKILL.md"},
+    }
+
+
+def _skill_conversation() -> list:
+    return [
+        HumanMessage(content="u1"),
+        AIMessage(content="", tool_calls=[_skill_read_call("t1", "alpha")]),
+        ToolMessage(content="alpha skill body", tool_call_id="t1"),
+        HumanMessage(content="u2"),
+        AIMessage(content="", tool_calls=[_skill_read_call("t2", "beta")]),
+        ToolMessage(content="beta skill body", tool_call_id="t2"),
+        HumanMessage(content="u3"),
+        AIMessage(content="final"),
+    ]
+
+
 def test_before_summarization_hook_receives_messages_before_compression() -> None:
     captured: list[SummarizationEvent] = []
     middleware = _middleware(before_summarization=[captured.append])
@@ -167,6 +201,295 @@ def test_memory_flush_hook_enqueues_filtered_messages_and_flushes(monkeypatch: p
     assert add_kwargs["reinforcement_detected"] is False
 
 
+def test_skill_rescue_keeps_recent_skill_reads_out_of_summary() -> None:
+    captured: list[SummarizationEvent] = []
+    middleware = _middleware(
+        before_summarization=[captured.append],
+        trigger=("messages", 4),
+        keep=("messages", 2),
+        preserve_recent_skill_count=5,
+        preserve_recent_skill_tokens=10_000,
+        preserve_recent_skill_tokens_per_skill=10_000,
+    )
+
+    result = middleware.before_model({"messages": _skill_conversation()}, _runtime())
+
+    assert len(captured) == 1
+    summarized_ids = {id(m) for m in captured[0].messages_to_summarize}
+    preserved = captured[0].preserved_messages
+
+    # Both skill-read bundles should be rescued into preserved_messages,
+    # tool_call ↔ tool_result pairs stay intact.
+    assert any(isinstance(m, ToolMessage) and m.content == "alpha skill body" for m in preserved)
+    assert any(isinstance(m, ToolMessage) and m.content == "beta skill body" for m in preserved)
+    for m in preserved:
+        if isinstance(m, ToolMessage) and m.content in {"alpha skill body", "beta skill body"}:
+            assert id(m) not in summarized_ids
+
+    # Preserved output order: rescued bundles first, then the tail kept by parent cutoff.
+    contents = [getattr(m, "content", None) for m in preserved]
+    assert contents[-2:] == ["u3", "final"]
+
+    # The final emitted state should start with RemoveMessage + summary, then preserved messages.
+    emitted = result["messages"]
+    assert isinstance(emitted[0], RemoveMessage)
+    assert emitted[1].content.startswith("Here is a summary")
+    assert list(emitted[-2:]) == list(preserved[-2:])
+
+
+def test_skill_rescue_respects_count_budget() -> None:
+    captured: list[SummarizationEvent] = []
+    middleware = _middleware(
+        before_summarization=[captured.append],
+        trigger=("messages", 4),
+        keep=("messages", 2),
+        preserve_recent_skill_count=1,
+        preserve_recent_skill_tokens=10_000,
+        preserve_recent_skill_tokens_per_skill=10_000,
+    )
+
+    middleware.before_model({"messages": _skill_conversation()}, _runtime())
+
+    preserved = captured[0].preserved_messages
+    summarized = captured[0].messages_to_summarize
+    # Newest skill (beta) rescued; older skill (alpha) falls into summary.
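+    # (Both sides are checked below: beta must appear among the preserved
+    # messages while alpha must still appear among the summarized ones, so
+    # the older bundle is compressed rather than silently dropped.)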
+    assert any(isinstance(m, ToolMessage) and m.content == "beta skill body" for m in preserved)
+    assert not any(isinstance(m, ToolMessage) and m.content == "alpha skill body" for m in preserved)
+    assert any(isinstance(m, ToolMessage) and m.content == "alpha skill body" for m in summarized)
+
+
+def test_skill_rescue_uses_injected_skills_container_path() -> None:
+    captured: list[SummarizationEvent] = []
+    middleware = _middleware(
+        before_summarization=[captured.append],
+        trigger=("messages", 4),
+        keep=("messages", 2),
+        preserve_recent_skill_count=5,
+        preserve_recent_skill_tokens=10_000,
+        preserve_recent_skill_tokens_per_skill=10_000,
+    )
+    middleware._skills_container_path = "/custom/skills"
+    messages = [
+        HumanMessage(content="u1"),
+        AIMessage(content="", tool_calls=[{"name": "read_file", "id": "t1", "args": {"path": "/custom/skills/demo/SKILL.md"}}]),
+        ToolMessage(content="demo skill body", tool_call_id="t1"),
+        HumanMessage(content="u2"),
+        AIMessage(content="final"),
+    ]
+
+    middleware.before_model({"messages": messages}, _runtime())
+
+    preserved = captured[0].preserved_messages
+    assert any(isinstance(m, ToolMessage) and m.content == "demo skill body" for m in preserved)
+
+
+def test_skill_rescue_uses_configured_skill_read_tool_names() -> None:
+    captured: list[SummarizationEvent] = []
+    middleware = _middleware(
+        before_summarization=[captured.append],
+        trigger=("messages", 4),
+        keep=("messages", 2),
+        skill_file_read_tool_names=["custom_read"],
+        preserve_recent_skill_count=5,
+        preserve_recent_skill_tokens=10_000,
+        preserve_recent_skill_tokens_per_skill=10_000,
+    )
+    middleware._skills_container_path = "/custom/skills"
+    messages = [
+        HumanMessage(content="u1"),
+        AIMessage(content="", tool_calls=[{"name": "custom_read", "id": "t1", "args": {"path": "/custom/skills/demo/SKILL.md"}}]),
+        ToolMessage(content="demo skill body", tool_call_id="t1"),
+        HumanMessage(content="u2"),
+        AIMessage(content="final"),
+    ]
+
+    middleware.before_model({"messages": messages}, _runtime())
+
+    preserved = captured[0].preserved_messages
+    assert any(isinstance(m, ToolMessage) and m.content == "demo skill body" for m in preserved)
+
+
+def test_skill_rescue_respects_per_skill_token_cap() -> None:
+    captured: list[SummarizationEvent] = []
+    middleware = _middleware(
+        before_summarization=[captured.append],
+        trigger=("messages", 4),
+        keep=("messages", 2),
+        preserve_recent_skill_count=5,
+        preserve_recent_skill_tokens=10_000,
+        # token_counter=len counts one token per message; per-skill cap of 0 rejects every bundle.
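+        # (preserve_recent_skill_tokens above stays at 10_000, so a rejection
+        # here can only be caused by the per-skill cap below.)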
+        preserve_recent_skill_tokens_per_skill=0,
+    )
+
+    middleware.before_model({"messages": _skill_conversation()}, _runtime())
+
+    preserved = captured[0].preserved_messages
+    assert not any(isinstance(m, ToolMessage) and m.content in {"alpha skill body", "beta skill body"} for m in preserved)
+
+
+def test_skill_rescue_disabled_when_count_zero() -> None:
+    captured: list[SummarizationEvent] = []
+    middleware = _middleware(
+        before_summarization=[captured.append],
+        trigger=("messages", 4),
+        keep=("messages", 2),
+        preserve_recent_skill_count=0,
+        preserve_recent_skill_tokens=10_000,
+        preserve_recent_skill_tokens_per_skill=10_000,
+    )
+
+    middleware.before_model({"messages": _skill_conversation()}, _runtime())
+
+    preserved = captured[0].preserved_messages
+    assert not any(isinstance(m, ToolMessage) for m in preserved)
+
+
+def test_skill_rescue_ignores_non_skill_tool_reads() -> None:
+    captured: list[SummarizationEvent] = []
+    middleware = _middleware(
+        before_summarization=[captured.append],
+        trigger=("messages", 4),
+        keep=("messages", 2),
+        preserve_recent_skill_count=5,
+        preserve_recent_skill_tokens=10_000,
+        preserve_recent_skill_tokens_per_skill=10_000,
+    )
+
+    messages = [
+        HumanMessage(content="u1"),
+        AIMessage(
+            content="",
+            tool_calls=[{"name": "read_file", "id": "t1", "args": {"path": "/mnt/user-data/workspace/notes.md"}}],
+        ),
+        ToolMessage(content="user notes", tool_call_id="t1"),
+        HumanMessage(content="u2"),
+        AIMessage(content="done"),
+    ]
+
+    middleware.before_model({"messages": messages}, _runtime())
+
+    preserved = captured[0].preserved_messages
+    assert not any(isinstance(m, ToolMessage) and m.content == "user notes" for m in preserved)
+
+
+def test_skill_rescue_does_not_preserve_non_skill_outputs_from_mixed_tool_calls() -> None:
+    captured: list[SummarizationEvent] = []
+    middleware = _middleware(
+        before_summarization=[captured.append],
+        trigger=("messages", 4),
+        keep=("messages", 2),
+        preserve_recent_skill_count=5,
+        preserve_recent_skill_tokens=10_000,
+        preserve_recent_skill_tokens_per_skill=10_000,
+    )
+
+    messages = [
+        HumanMessage(content="u1"),
+        AIMessage(
+            content="",
+            tool_calls=[
+                _skill_read_call("skill-1", "alpha"),
+                {"name": "read_file", "id": "file-1", "args": {"path": "/mnt/user-data/workspace/notes.md"}},
+            ],
+        ),
+        ToolMessage(content="alpha skill body", tool_call_id="skill-1"),
+        ToolMessage(content="user notes", tool_call_id="file-1"),
+        HumanMessage(content="u2"),
+        AIMessage(content="done"),
+    ]
+
+    middleware.before_model({"messages": messages}, _runtime())
+
+    preserved = captured[0].preserved_messages
+    summarized = captured[0].messages_to_summarize
+
+    preserved_ai = next(m for m in preserved if isinstance(m, AIMessage) and m.tool_calls)
+    summarized_ai = next(m for m in summarized if isinstance(m, AIMessage) and m.tool_calls)
+
+    assert [tc["id"] for tc in preserved_ai.tool_calls] == ["skill-1"]
+    assert [tc["id"] for tc in summarized_ai.tool_calls] == ["file-1"]
+    assert any(isinstance(m, ToolMessage) and m.content == "alpha skill body" for m in preserved)
+    assert not any(isinstance(m, ToolMessage) and m.content == "user notes" for m in preserved)
+    assert any(isinstance(m, ToolMessage) and m.content == "user notes" for m in summarized)
+
+
+def test_skill_rescue_clears_content_on_rescued_ai_clone() -> None:
+    captured: list[SummarizationEvent] = []
+    middleware = _middleware(
+        before_summarization=[captured.append],
+        trigger=("messages", 4),
+        keep=("messages", 2),
+        preserve_recent_skill_count=5,
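+        # (Budgets below stay generous so the mixed bundle is always rescued;
+        # the assertions focus on the content of the rescued AIMessage clone.)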
+        preserve_recent_skill_tokens=10_000,
+        preserve_recent_skill_tokens_per_skill=10_000,
+    )
+
+    messages = [
+        HumanMessage(content="u1"),
+        AIMessage(
+            content="reading skill and notes",
+            tool_calls=[
+                _skill_read_call("skill-1", "alpha"),
+                {"name": "read_file", "id": "file-1", "args": {"path": "/mnt/user-data/workspace/notes.md"}},
+            ],
+        ),
+        ToolMessage(content="alpha skill body", tool_call_id="skill-1"),
+        ToolMessage(content="user notes", tool_call_id="file-1"),
+        HumanMessage(content="u2"),
+        AIMessage(content="done"),
+    ]
+
+    middleware.before_model({"messages": messages}, _runtime())
+
+    preserved = captured[0].preserved_messages
+    summarized = captured[0].messages_to_summarize
+
+    preserved_ai = next(m for m in preserved if isinstance(m, AIMessage) and m.tool_calls)
+    summarized_ai = next(m for m in summarized if isinstance(m, AIMessage) and m.tool_calls)
+
+    assert preserved_ai.content == ""
+    assert summarized_ai.content == "reading skill and notes"
+
+
+def test_skill_rescue_only_preserves_skill_calls_with_matched_tool_results() -> None:
+    captured: list[SummarizationEvent] = []
+    middleware = _middleware(
+        before_summarization=[captured.append],
+        trigger=("messages", 4),
+        keep=("messages", 2),
+        preserve_recent_skill_count=5,
+        preserve_recent_skill_tokens=10_000,
+        preserve_recent_skill_tokens_per_skill=10_000,
+    )
+
+    messages = [
+        HumanMessage(content="u1"),
+        AIMessage(
+            content="",
+            tool_calls=[
+                _skill_read_call("skill-1", "alpha"),
+                _skill_read_call("skill-2", "beta"),
+            ],
+        ),
+        ToolMessage(content="alpha skill body", tool_call_id="skill-1"),
+        HumanMessage(content="u2"),
+        AIMessage(content="done"),
+    ]
+
+    middleware.before_model({"messages": messages}, _runtime())
+
+    preserved = captured[0].preserved_messages
+    summarized = captured[0].messages_to_summarize
+
+    preserved_ai = next(m for m in preserved if isinstance(m, AIMessage) and m.tool_calls)
+    summarized_ai = next(m for m in summarized if isinstance(m, AIMessage) and m.tool_calls)
+
+    assert [tc["id"] for tc in preserved_ai.tool_calls] == ["skill-1"]
+    assert [tc["id"] for tc in summarized_ai.tool_calls] == ["skill-2"]
+    assert any(isinstance(m, ToolMessage) and m.content == "alpha skill body" for m in preserved)
+    assert not any(isinstance(m, ToolMessage) and getattr(m, "tool_call_id", None) == "skill-2" for m in preserved)
+
+
 def test_memory_flush_hook_preserves_agent_scoped_memory(monkeypatch: pytest.MonkeyPatch) -> None:
     queue = MagicMock()
     monkeypatch.setattr("deerflow.agents.memory.summarization_hook.get_memory_config", lambda: MemoryConfig(enabled=True))
diff --git a/backend/tests/test_task_tool_core_logic.py b/backend/tests/test_task_tool_core_logic.py
index 5251c69ed..1ae008df2 100644
--- a/backend/tests/test_task_tool_core_logic.py
+++ b/backend/tests/test_task_tool_core_logic.py
@@ -143,7 +143,7 @@ def test_task_tool_emits_running_and_completed_events(monkeypatch):
     monkeypatch.setattr(task_tool_module, "SubagentStatus", FakeSubagentStatus)
     monkeypatch.setattr(task_tool_module, "SubagentExecutor", DummyExecutor)
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "Skills Appendix")
+    monkeypatch.setattr(task_tool_module, "get_background_task_result", lambda _: next(responses))
     monkeypatch.setattr(task_tool_module, "get_stream_writer", lambda: events.append)
     monkeypatch.setattr(task_tool_module.asyncio, "sleep", _no_sleep)
 
@@ -165,7 +165,9 @@ def test_task_tool_emits_running_and_completed_events(monkeypatch):
     assert captured["executor_kwargs"]["thread_id"] == "thread-1"
     assert captured["executor_kwargs"]["parent_model"] == "ark-model"
     assert captured["executor_kwargs"]["config"].max_turns == 7
-    assert "Skills Appendix" in captured["executor_kwargs"]["config"].system_prompt
+    # Skills are no longer appended to system_prompt; they are loaded per-session
+    # by SubagentExecutor and injected as conversation items (Codex pattern).
+    assert captured["executor_kwargs"]["config"].system_prompt == "Base system prompt"
 
     get_available_tools.assert_called_once_with(model_name="ark-model", groups=None, subagent_enabled=False)
 
@@ -199,7 +201,6 @@ def test_task_tool_propagates_tool_groups_to_subagent(monkeypatch):
     monkeypatch.setattr(task_tool_module, "SubagentStatus", FakeSubagentStatus)
     monkeypatch.setattr(task_tool_module, "SubagentExecutor", DummyExecutor)
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
     monkeypatch.setattr(
         task_tool_module,
         "get_background_task_result",
@@ -222,6 +223,90 @@
     get_available_tools.assert_called_once_with(model_name="ark-model", groups=parent_tool_groups, subagent_enabled=False)
 
 
+def test_task_tool_inherits_parent_skill_allowlist_for_default_subagent(monkeypatch):
+    config = _make_subagent_config()
+    runtime = _make_runtime()
+    runtime.config["metadata"]["available_skills"] = ["safe-skill"]
+    events = []
+    captured = {}
+
+    class DummyExecutor:
+        def __init__(self, **kwargs):
+            captured["config"] = kwargs["config"]
+
+        def execute_async(self, prompt, task_id=None):
+            return task_id or "generated-task-id"
+
+    monkeypatch.setattr(task_tool_module, "SubagentStatus", FakeSubagentStatus)
+    monkeypatch.setattr(task_tool_module, "SubagentExecutor", DummyExecutor)
+    monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
+    monkeypatch.setattr(
+        task_tool_module,
+        "get_background_task_result",
+        lambda _: _make_result(FakeSubagentStatus.COMPLETED, result="done"),
+    )
+    monkeypatch.setattr(task_tool_module, "get_stream_writer", lambda: events.append)
+    monkeypatch.setattr(task_tool_module.asyncio, "sleep", _no_sleep)
+    monkeypatch.setattr("deerflow.tools.get_available_tools", MagicMock(return_value=[]))
+
+    output = _run_task_tool(
+        runtime=runtime,
+        description="执行任务",
+        prompt="use skills",
+        subagent_type="general-purpose",
+        tool_call_id="tc-skills",
+    )
+
+    assert output == "Task Succeeded. Result: done"
+    assert captured["config"].skills == ["safe-skill"]
+
+
+def test_task_tool_intersects_parent_and_subagent_skill_allowlists(monkeypatch):
+    config = _make_subagent_config()
+    config = SubagentConfig(
+        name=config.name,
+        description=config.description,
+        system_prompt=config.system_prompt,
+        max_turns=config.max_turns,
+        timeout_seconds=config.timeout_seconds,
+        skills=["safe-skill", "other-skill"],
+    )
+    runtime = _make_runtime()
+    runtime.config["metadata"]["available_skills"] = ["safe-skill"]
+    events = []
+    captured = {}
+
+    class DummyExecutor:
+        def __init__(self, **kwargs):
+            captured["config"] = kwargs["config"]
+
+        def execute_async(self, prompt, task_id=None):
+            return task_id or "generated-task-id"
+
+    monkeypatch.setattr(task_tool_module, "SubagentStatus", FakeSubagentStatus)
+    monkeypatch.setattr(task_tool_module, "SubagentExecutor", DummyExecutor)
+    monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
+    monkeypatch.setattr(
+        task_tool_module,
+        "get_background_task_result",
+        lambda _: _make_result(FakeSubagentStatus.COMPLETED, result="done"),
+    )
+    monkeypatch.setattr(task_tool_module, "get_stream_writer", lambda: events.append)
+    monkeypatch.setattr(task_tool_module.asyncio, "sleep", _no_sleep)
+    monkeypatch.setattr("deerflow.tools.get_available_tools", MagicMock(return_value=[]))
+
+    output = _run_task_tool(
+        runtime=runtime,
+        description="执行任务",
+        prompt="use skills",
+        subagent_type="general-purpose",
+        tool_call_id="tc-skills-intersection",
+    )
+
+    assert output == "Task Succeeded. Result: done"
+    assert captured["config"].skills == ["safe-skill"]
+
+
 def test_task_tool_no_tool_groups_passes_none(monkeypatch):
     """Verify that when metadata has no tool_groups, groups=None is passed (backward compat)."""
     config = _make_subagent_config()
@@ -240,7 +325,6 @@
     monkeypatch.setattr(task_tool_module, "SubagentStatus", FakeSubagentStatus)
     monkeypatch.setattr(task_tool_module, "SubagentExecutor", DummyExecutor)
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
     monkeypatch.setattr(
         task_tool_module,
        "get_background_task_result",
@@ -279,7 +363,6 @@ def test_task_tool_runtime_none_passes_groups_none(monkeypatch):
     monkeypatch.setattr(task_tool_module, "SubagentStatus", FakeSubagentStatus)
     monkeypatch.setattr(task_tool_module, "SubagentExecutor", DummyExecutor)
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
     monkeypatch.setattr(
         task_tool_module,
         "get_background_task_result",
@@ -311,7 +394,7 @@
         type("DummyExecutor", (), {"__init__": lambda self, **kwargs: None, "execute_async": lambda self, prompt, task_id=None: task_id}),
     )
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
+
     monkeypatch.setattr(
         task_tool_module,
         "get_background_task_result",
@@ -345,7 +428,7 @@ def test_task_tool_returns_timed_out_message(monkeypatch):
         type("DummyExecutor", (), {"__init__": lambda self, **kwargs: None, "execute_async": lambda self, prompt, task_id=None: task_id}),
     )
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
+
     monkeypatch.setattr(
         task_tool_module,
         "get_background_task_result",
@@ -381,7 +464,7 @@ def test_task_tool_polling_safety_timeout(monkeypatch):
         type("DummyExecutor", (), {"__init__": lambda self, **kwargs: None, "execute_async": lambda self, prompt, task_id=None: task_id}),
     )
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
+
     monkeypatch.setattr(
         task_tool_module,
         "get_background_task_result",
@@ -417,7 +500,7 @@ def test_cleanup_called_on_completed(monkeypatch):
         type("DummyExecutor", (), {"__init__": lambda self, **kwargs: None, "execute_async": lambda self, prompt, task_id=None: task_id}),
     )
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
+
     monkeypatch.setattr(
         task_tool_module,
         "get_background_task_result",
@@ -457,7 +540,7 @@ def test_cleanup_called_on_failed(monkeypatch):
         type("DummyExecutor", (), {"__init__": lambda self, **kwargs: None, "execute_async": lambda self, prompt, task_id=None: task_id}),
     )
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
+
     monkeypatch.setattr(
         task_tool_module,
         "get_background_task_result",
@@ -497,7 +580,7 @@ def test_cleanup_called_on_timed_out(monkeypatch):
         type("DummyExecutor", (), {"__init__": lambda self, **kwargs: None, "execute_async": lambda self, prompt, task_id=None: task_id}),
     )
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
+
     monkeypatch.setattr(
         task_tool_module,
         "get_background_task_result",
@@ -544,7 +627,7 @@ def test_cleanup_not_called_on_polling_safety_timeout(monkeypatch):
         type("DummyExecutor", (), {"__init__": lambda self, **kwargs: None, "execute_async": lambda self, prompt, task_id=None: task_id}),
     )
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
+
     monkeypatch.setattr(
         task_tool_module,
         "get_background_task_result",
@@ -597,7 +680,7 @@ def test_cleanup_scheduled_on_cancellation(monkeypatch):
         type("DummyExecutor", (), {"__init__": lambda self, **kwargs: None, "execute_async": lambda self, prompt, task_id=None: task_id}),
     )
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
+
     monkeypatch.setattr(task_tool_module, "get_background_task_result", get_result)
     monkeypatch.setattr(task_tool_module, "get_stream_writer", lambda: events.append)
     monkeypatch.setattr(task_tool_module.asyncio, "sleep", cancel_on_first_sleep)
@@ -648,7 +731,7 @@ def test_cancelled_cleanup_stops_after_timeout(monkeypatch):
         type("DummyExecutor", (), {"__init__": lambda self, **kwargs: None, "execute_async": lambda self, prompt, task_id=None: task_id}),
     )
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
+
     monkeypatch.setattr(
         task_tool_module,
         "get_background_task_result",
@@ -703,7 +786,7 @@ def test_cancellation_calls_request_cancel(monkeypatch):
         type("DummyExecutor", (), {"__init__": lambda self, **kwargs: None, "execute_async": lambda self, prompt, task_id=None: task_id}),
     )
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
+
     monkeypatch.setattr(
         task_tool_module,
         "get_background_task_result",
@@ -761,7 +844,7 @@ def test_task_tool_returns_cancelled_message(monkeypatch):
         type("DummyExecutor", (), {"__init__": lambda self, **kwargs: None, "execute_async": lambda self, prompt, task_id=None: task_id}),
     )
     monkeypatch.setattr(task_tool_module, "get_subagent_config", lambda _: config)
-    monkeypatch.setattr(task_tool_module, "get_skills_prompt_section", lambda: "")
+
     monkeypatch.setattr(task_tool_module, "get_background_task_result", lambda _: next(responses))
     monkeypatch.setattr(task_tool_module, "get_stream_writer", lambda: events.append)
     monkeypatch.setattr(task_tool_module.asyncio, "sleep", _no_sleep)
diff --git a/backend/tests/test_thread_meta_repo.py b/backend/tests/test_thread_meta_repo.py
new file mode 100644
index 000000000..3a6532567
--- /dev/null
+++ b/backend/tests/test_thread_meta_repo.py
@@ -0,0 +1,178 @@
+"""Tests for ThreadMetaRepository (SQLAlchemy-backed)."""
+
+import pytest
+
+from deerflow.persistence.thread_meta import ThreadMetaRepository
+
+
+async def _make_repo(tmp_path):
+    from deerflow.persistence.engine import get_session_factory, init_engine
+
+    url = f"sqlite+aiosqlite:///{tmp_path / 'test.db'}"
+    await init_engine("sqlite", url=url, sqlite_dir=str(tmp_path))
+    return ThreadMetaRepository(get_session_factory())
+
+
+async def _cleanup():
+    from deerflow.persistence.engine import close_engine
+
+    await close_engine()
+
+
+class TestThreadMetaRepository:
+    @pytest.mark.anyio
+    async def test_create_and_get(self, tmp_path):
+        repo = await _make_repo(tmp_path)
+        record = await repo.create("t1")
+        assert record["thread_id"] == "t1"
+        assert record["status"] == "idle"
+        assert "created_at" in record
+
+        fetched = await repo.get("t1")
+        assert fetched is not None
+        assert fetched["thread_id"] == "t1"
+        await _cleanup()
+
+    @pytest.mark.anyio
+    async def test_create_with_assistant_id(self, tmp_path):
+        repo = await _make_repo(tmp_path)
+        record = await repo.create("t1", assistant_id="agent1")
+        assert record["assistant_id"] == "agent1"
+        await _cleanup()
+
+    @pytest.mark.anyio
+    async def test_create_with_owner_and_display_name(self, tmp_path):
+        repo = await _make_repo(tmp_path)
+        record = await repo.create("t1", user_id="user1", display_name="My Thread")
+        assert record["user_id"] == "user1"
+        assert record["display_name"] == "My Thread"
+        await _cleanup()
+
+    @pytest.mark.anyio
+    async def test_create_with_metadata(self, tmp_path):
+        repo = await _make_repo(tmp_path)
+        record = await repo.create("t1", metadata={"key": "value"})
+        assert record["metadata"] == {"key": "value"}
+        await _cleanup()
+
+    @pytest.mark.anyio
+    async def test_get_nonexistent(self, tmp_path):
+        repo = await _make_repo(tmp_path)
+        assert await repo.get("nonexistent") is None
+        await _cleanup()
+
+    @pytest.mark.anyio
+    async def test_check_access_no_record_allows(self, tmp_path):
+        repo = await _make_repo(tmp_path)
+        assert await repo.check_access("unknown", "user1") is True
+        await _cleanup()
+
+    @pytest.mark.anyio
+    async def test_check_access_owner_matches(self, tmp_path):
+        repo = await _make_repo(tmp_path)
+        await repo.create("t1", user_id="user1")
+        assert await repo.check_access("t1", "user1") is True
+        await _cleanup()
+
+    @pytest.mark.anyio
+    async def test_check_access_owner_mismatch(self, tmp_path):
+        repo = await _make_repo(tmp_path)
+        await repo.create("t1", user_id="user1")
+        assert await repo.check_access("t1", "user2") is False
+        await _cleanup()
+
+    @pytest.mark.anyio
+    async def test_check_access_no_owner_allows_all(self, tmp_path):
+        repo = await _make_repo(tmp_path)
+        # Explicit user_id=None to bypass the new AUTO default that
+        # would otherwise pick up the test user from the autouse fixture.
+        await repo.create("t1", user_id=None)
+        assert await repo.check_access("t1", "anyone") is True
+        await _cleanup()
+
+    @pytest.mark.anyio
+    async def test_check_access_strict_missing_row_denied(self, tmp_path):
+        """require_existing=True flips the missing-row case to *denied*.
+
+        Closes the delete-idempotence cross-user gap: after a thread is
+        deleted, the row is gone, and the permissive default would let any
+        caller "claim" it as untracked. The strict mode demands a row.
+        """
+        repo = await _make_repo(tmp_path)
+        assert await repo.check_access("never-existed", "user1", require_existing=True) is False
+        await _cleanup()
+
+    @pytest.mark.anyio
+    async def test_check_access_strict_owner_match_allowed(self, tmp_path):
+        repo = await _make_repo(tmp_path)
+        await repo.create("t1", user_id="user1")
+        assert await repo.check_access("t1", "user1", require_existing=True) is True
+        await _cleanup()
+
+    @pytest.mark.anyio
+    async def test_check_access_strict_owner_mismatch_denied(self, tmp_path):
+        repo = await _make_repo(tmp_path)
+        await repo.create("t1", user_id="user1")
+        assert await repo.check_access("t1", "user2", require_existing=True) is False
+        await _cleanup()
+
+    @pytest.mark.anyio
+    async def test_check_access_strict_null_owner_still_allowed(self, tmp_path):
+        """Even in strict mode, a row with NULL user_id stays shared.
+
+        The strict flag tightens the *missing row* case, not the *shared
+        row* case — legacy pre-auth rows that survived a clean migration
+        without an owner are still everyone's.
+ """ + repo = await _make_repo(tmp_path) + await repo.create("t1", user_id=None) + assert await repo.check_access("t1", "anyone", require_existing=True) is True + await _cleanup() + + @pytest.mark.anyio + async def test_update_status(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.create("t1") + await repo.update_status("t1", "busy") + record = await repo.get("t1") + assert record["status"] == "busy" + await _cleanup() + + @pytest.mark.anyio + async def test_delete(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.create("t1") + await repo.delete("t1") + assert await repo.get("t1") is None + await _cleanup() + + @pytest.mark.anyio + async def test_delete_nonexistent_is_noop(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.delete("nonexistent") # should not raise + await _cleanup() + + @pytest.mark.anyio + async def test_update_metadata_merges(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.create("t1", metadata={"a": 1, "b": 2}) + await repo.update_metadata("t1", {"b": 99, "c": 3}) + record = await repo.get("t1") + # Existing key preserved, overlapping key overwritten, new key added + assert record["metadata"] == {"a": 1, "b": 99, "c": 3} + await _cleanup() + + @pytest.mark.anyio + async def test_update_metadata_on_empty(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.create("t1") + await repo.update_metadata("t1", {"k": "v"}) + record = await repo.get("t1") + assert record["metadata"] == {"k": "v"} + await _cleanup() + + @pytest.mark.anyio + async def test_update_metadata_nonexistent_is_noop(self, tmp_path): + repo = await _make_repo(tmp_path) + await repo.update_metadata("nonexistent", {"k": "v"}) # should not raise + await _cleanup() diff --git a/backend/tests/test_thread_run_messages_pagination.py b/backend/tests/test_thread_run_messages_pagination.py new file mode 100644 index 000000000..00e354a34 --- /dev/null +++ b/backend/tests/test_thread_run_messages_pagination.py @@ -0,0 +1,130 @@ +"""Tests for paginated GET /api/threads/{thread_id}/runs/{run_id}/messages endpoint.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, MagicMock + +from _router_auth_helpers import make_authed_test_app +from fastapi.testclient import TestClient + +from app.gateway.routers import thread_runs + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _make_app(event_store=None): + """Build a test FastAPI app with stub auth and mocked state.""" + app = make_authed_test_app() + app.include_router(thread_runs.router) + + if event_store is not None: + app.state.run_event_store = event_store + + return app + + +def _make_event_store(rows: list[dict]): + """Return an AsyncMock event store whose list_messages_by_run() returns rows.""" + store = MagicMock() + store.list_messages_by_run = AsyncMock(return_value=rows) + return store + + +def _make_message(seq: int) -> dict: + return {"seq": seq, "event_type": "ai_message", "category": "message", "content": f"msg-{seq}"} + + +# --------------------------------------------------------------------------- +# Tests +# --------------------------------------------------------------------------- + + +def test_returns_paginated_envelope(): + """GET /api/threads/{tid}/runs/{rid}/messages returns {data: [...], has_more: bool}.""" + rows = [_make_message(i) for i in range(1, 4)] + app = _make_app(event_store=_make_event_store(rows)) + with 
TestClient(app) as client: + response = client.get("/api/threads/thread-1/runs/run-1/messages") + assert response.status_code == 200 + body = response.json() + assert "data" in body + assert "has_more" in body + assert body["has_more"] is False + assert len(body["data"]) == 3 + + +def test_has_more_true_when_extra_row_returned(): + """has_more=True when event store returns limit+1 rows.""" + # Default limit is 50; provide 51 rows + rows = [_make_message(i) for i in range(1, 52)] # 51 rows + app = _make_app(event_store=_make_event_store(rows)) + with TestClient(app) as client: + response = client.get("/api/threads/thread-2/runs/run-2/messages") + assert response.status_code == 200 + body = response.json() + assert body["has_more"] is True + assert len(body["data"]) == 50 # trimmed to limit + + +def test_after_seq_forwarded_to_event_store(): + """after_seq query param is forwarded to event_store.list_messages_by_run.""" + rows = [_make_message(10)] + event_store = _make_event_store(rows) + app = _make_app(event_store=event_store) + with TestClient(app) as client: + response = client.get("/api/threads/thread-3/runs/run-3/messages?after_seq=5") + assert response.status_code == 200 + event_store.list_messages_by_run.assert_awaited_once_with( + "thread-3", + "run-3", + limit=51, # default limit(50) + 1 + before_seq=None, + after_seq=5, + ) + + +def test_before_seq_forwarded_to_event_store(): + """before_seq query param is forwarded to event_store.list_messages_by_run.""" + rows = [_make_message(3)] + event_store = _make_event_store(rows) + app = _make_app(event_store=event_store) + with TestClient(app) as client: + response = client.get("/api/threads/thread-4/runs/run-4/messages?before_seq=10") + assert response.status_code == 200 + event_store.list_messages_by_run.assert_awaited_once_with( + "thread-4", + "run-4", + limit=51, + before_seq=10, + after_seq=None, + ) + + +def test_custom_limit_forwarded_to_event_store(): + """Custom limit is forwarded as limit+1 to the event store.""" + rows = [_make_message(i) for i in range(1, 6)] + event_store = _make_event_store(rows) + app = _make_app(event_store=event_store) + with TestClient(app) as client: + response = client.get("/api/threads/thread-5/runs/run-5/messages?limit=10") + assert response.status_code == 200 + event_store.list_messages_by_run.assert_awaited_once_with( + "thread-5", + "run-5", + limit=11, # 10 + 1 + before_seq=None, + after_seq=None, + ) + + +def test_empty_data_when_no_messages(): + """Returns empty data list with has_more=False when no messages exist.""" + app = _make_app(event_store=_make_event_store([])) + with TestClient(app) as client: + response = client.get("/api/threads/thread-6/runs/run-6/messages") + assert response.status_code == 200 + body = response.json() + assert body["data"] == [] + assert body["has_more"] is False diff --git a/backend/tests/test_threads_router.py b/backend/tests/test_threads_router.py index ad3abe4e9..4ffa28a8c 100644 --- a/backend/tests/test_threads_router.py +++ b/backend/tests/test_threads_router.py @@ -1,7 +1,8 @@ from unittest.mock import patch import pytest -from fastapi import FastAPI, HTTPException +from _router_auth_helpers import make_authed_test_app +from fastapi import HTTPException from fastapi.testclient import TestClient from app.gateway.routers import threads @@ -49,12 +50,15 @@ def test_delete_thread_data_rejects_invalid_thread_id(tmp_path): def test_delete_thread_route_cleans_thread_directory(tmp_path): - paths = Paths(tmp_path) - thread_dir = paths.thread_dir("thread-route") - 
paths.sandbox_work_dir("thread-route").mkdir(parents=True, exist_ok=True) - (paths.sandbox_work_dir("thread-route") / "notes.txt").write_text("hello", encoding="utf-8") + from deerflow.runtime.user_context import get_effective_user_id - app = FastAPI() + paths = Paths(tmp_path) + user_id = get_effective_user_id() + thread_dir = paths.thread_dir("thread-route", user_id=user_id) + paths.sandbox_work_dir("thread-route", user_id=user_id).mkdir(parents=True, exist_ok=True) + (paths.sandbox_work_dir("thread-route", user_id=user_id) / "notes.txt").write_text("hello", encoding="utf-8") + + app = make_authed_test_app() app.include_router(threads.router) with patch("app.gateway.routers.threads.get_paths", return_value=paths): @@ -69,7 +73,7 @@ def test_delete_thread_route_cleans_thread_directory(tmp_path): def test_delete_thread_route_rejects_invalid_thread_id(tmp_path): paths = Paths(tmp_path) - app = FastAPI() + app = make_authed_test_app() app.include_router(threads.router) with patch("app.gateway.routers.threads.get_paths", return_value=paths): @@ -82,7 +86,7 @@ def test_delete_thread_route_rejects_invalid_thread_id(tmp_path): def test_delete_thread_route_returns_422_for_route_safe_invalid_id(tmp_path): paths = Paths(tmp_path) - app = FastAPI() + app = make_authed_test_app() app.include_router(threads.router) with patch("app.gateway.routers.threads.get_paths", return_value=paths): @@ -107,3 +111,28 @@ def test_delete_thread_data_returns_generic_500_error(tmp_path): assert exc_info.value.detail == "Failed to delete local thread data." assert "/secret/path" not in exc_info.value.detail log_exception.assert_called_once_with("Failed to delete thread data for %s", "thread-cleanup") + + +# ── Server-reserved metadata key stripping ────────────────────────────────── + + +def test_strip_reserved_metadata_removes_user_id(): + """Client-supplied user_id is dropped to prevent reflection attacks.""" + out = threads._strip_reserved_metadata({"user_id": "victim-id", "title": "ok"}) + assert out == {"title": "ok"} + + +def test_strip_reserved_metadata_passes_through_safe_keys(): + """Non-reserved keys are preserved verbatim.""" + md = {"title": "ok", "tags": ["a", "b"], "custom": {"x": 1}} + assert threads._strip_reserved_metadata(md) == md + + +def test_strip_reserved_metadata_empty_input(): + """Empty / None metadata returns same object — no crash.""" + assert threads._strip_reserved_metadata({}) == {} + + +def test_strip_reserved_metadata_strips_all_reserved_keys(): + out = threads._strip_reserved_metadata({"user_id": "x", "keep": "me"}) + assert out == {"keep": "me"} diff --git a/backend/tests/test_title_middleware_core_logic.py b/backend/tests/test_title_middleware_core_logic.py index ce7376e2e..afd10f2b3 100644 --- a/backend/tests/test_title_middleware_core_logic.py +++ b/backend/tests/test_title_middleware_core_logic.py @@ -93,6 +93,10 @@ class TestTitleMiddlewareCoreLogic: assert title == "短标题" title_middleware_module.create_chat_model.assert_called_once_with(thinking_enabled=False) model.ainvoke.assert_awaited_once() + assert model.ainvoke.await_args.kwargs["config"] == { + "run_name": "title_agent", + "tags": ["middleware:title"], + } def test_generate_title_normalizes_structured_message_content(self, monkeypatch): _set_test_title_config(max_chars=20) diff --git a/backend/tests/test_tool_error_handling_middleware.py b/backend/tests/test_tool_error_handling_middleware.py index 698a0d8cb..4add370f0 100644 --- a/backend/tests/test_tool_error_handling_middleware.py +++ 
b/backend/tests/test_tool_error_handling_middleware.py @@ -1,10 +1,32 @@ -from types import SimpleNamespace +import sys +from types import ModuleType, SimpleNamespace import pytest from langchain_core.messages import ToolMessage from langgraph.errors import GraphInterrupt -from deerflow.agents.middlewares.tool_error_handling_middleware import ToolErrorHandlingMiddleware +from deerflow.agents.middlewares.tool_error_handling_middleware import ( + ToolErrorHandlingMiddleware, + build_subagent_runtime_middlewares, +) +from deerflow.config.app_config import AppConfig, CircuitBreakerConfig +from deerflow.config.guardrails_config import GuardrailsConfig +from deerflow.config.sandbox_config import SandboxConfig + + +def _module(name: str, **attrs): + module = ModuleType(name) + for key, value in attrs.items(): + setattr(module, key, value) + return module + + +def _make_app_config() -> AppConfig: + return AppConfig( + sandbox=SandboxConfig(use="test"), + guardrails=GuardrailsConfig(enabled=False), + circuit_breaker=CircuitBreakerConfig(failure_threshold=7, recovery_timeout_sec=11), + ) def _request(name: str = "web_search", tool_call_id: str | None = "tc-1"): @@ -14,6 +36,56 @@ def _request(name: str = "web_search", tool_call_id: str | None = "tc-1"): return SimpleNamespace(tool_call=tool_call) +def test_build_subagent_runtime_middlewares_threads_app_config_to_llm_middleware(monkeypatch: pytest.MonkeyPatch): + captured: dict[str, object] = {} + + class FakeMiddleware: + def __init__(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + + class FakeLLMErrorHandlingMiddleware: + def __init__(self, *, app_config): + captured["app_config"] = app_config + + app_config = _make_app_config() + + monkeypatch.setitem( + sys.modules, + "deerflow.agents.middlewares.llm_error_handling_middleware", + _module( + "deerflow.agents.middlewares.llm_error_handling_middleware", + LLMErrorHandlingMiddleware=FakeLLMErrorHandlingMiddleware, + ), + ) + monkeypatch.setitem( + sys.modules, + "deerflow.agents.middlewares.thread_data_middleware", + _module("deerflow.agents.middlewares.thread_data_middleware", ThreadDataMiddleware=FakeMiddleware), + ) + monkeypatch.setitem( + sys.modules, + "deerflow.sandbox.middleware", + _module("deerflow.sandbox.middleware", SandboxMiddleware=FakeMiddleware), + ) + monkeypatch.setitem( + sys.modules, + "deerflow.agents.middlewares.dangling_tool_call_middleware", + _module("deerflow.agents.middlewares.dangling_tool_call_middleware", DanglingToolCallMiddleware=FakeMiddleware), + ) + monkeypatch.setitem( + sys.modules, + "deerflow.agents.middlewares.sandbox_audit_middleware", + _module("deerflow.agents.middlewares.sandbox_audit_middleware", SandboxAuditMiddleware=FakeMiddleware), + ) + + middlewares = build_subagent_runtime_middlewares(app_config=app_config, lazy_init=False) + + assert captured["app_config"] is app_config + assert len(middlewares) == 6 + assert isinstance(middlewares[-1], ToolErrorHandlingMiddleware) + + def test_wrap_tool_call_passthrough_on_success(): middleware = ToolErrorHandlingMiddleware() req = _request() diff --git a/backend/tests/test_tool_search.py b/backend/tests/test_tool_search.py index 8f71144c5..428bfec3d 100644 --- a/backend/tests/test_tool_search.py +++ b/backend/tests/test_tool_search.py @@ -2,8 +2,10 @@ import json import sys +from types import SimpleNamespace import pytest +from langchain_core.messages import ToolMessage from langchain_core.tools import tool as langchain_tool from deerflow.config.tool_search_config import ToolSearchConfig, 
load_tool_search_config_from_dict @@ -83,6 +85,16 @@ class TestDeferredToolRegistry: assert "github_create_issue" in names assert "slack_send_message" in names + def test_deferred_names(self, registry): + names = registry.deferred_names + assert "github_create_issue" in names + assert "slack_send_message" in names + assert len(names) == 6 + + def test_contains(self, registry): + assert registry.contains("github_create_issue") is True + assert registry.contains("not_registered") is False + def test_search_select_single(self, registry): results = registry.search("select:github_create_issue") assert len(results) == 1 @@ -509,3 +521,89 @@ class TestToolSearchPromotion: assert "slack_send_message" not in remaining assert "slack_list_channels" not in remaining assert len(registry) == 4 + + +class TestDeferredToolExecutionGate: + def test_unpromoted_deferred_tool_call_is_blocked(self, registry): + from deerflow.agents.middlewares.deferred_tool_filter_middleware import DeferredToolFilterMiddleware + + set_deferred_registry(registry) + middleware = DeferredToolFilterMiddleware() + request = SimpleNamespace(tool_call={"name": "github_create_issue", "id": "call-1"}) + called = False + + def handler(_request): + nonlocal called + called = True + return ToolMessage(content="executed", tool_call_id="call-1", name="github_create_issue") + + result = middleware.wrap_tool_call(request, handler) + + assert called is False + assert isinstance(result, ToolMessage) + assert result.status == "error" + assert result.tool_call_id == "call-1" + assert "tool_search" in result.content + assert "github_create_issue" in result.content + + def test_promoted_deferred_tool_call_is_allowed(self, registry): + from deerflow.agents.middlewares.deferred_tool_filter_middleware import DeferredToolFilterMiddleware + + registry.promote({"github_create_issue"}) + set_deferred_registry(registry) + middleware = DeferredToolFilterMiddleware() + request = SimpleNamespace(tool_call={"name": "github_create_issue", "id": "call-1"}) + called = False + + def handler(_request): + nonlocal called + called = True + return ToolMessage(content="executed", tool_call_id="call-1", name="github_create_issue") + + result = middleware.wrap_tool_call(request, handler) + + assert called is True + assert isinstance(result, ToolMessage) + assert result.content == "executed" + + def test_non_deferred_tool_call_is_allowed(self, registry): + from deerflow.agents.middlewares.deferred_tool_filter_middleware import DeferredToolFilterMiddleware + + set_deferred_registry(registry) + middleware = DeferredToolFilterMiddleware() + request = SimpleNamespace(tool_call={"name": "local_tool", "id": "call-1"}) + called = False + + def handler(_request): + nonlocal called + called = True + return ToolMessage(content="executed", tool_call_id="call-1", name="local_tool") + + result = middleware.wrap_tool_call(request, handler) + + assert called is True + assert isinstance(result, ToolMessage) + assert result.content == "executed" + + @pytest.mark.anyio + async def test_unpromoted_deferred_tool_call_is_blocked_async(self, registry): + from deerflow.agents.middlewares.deferred_tool_filter_middleware import DeferredToolFilterMiddleware + + set_deferred_registry(registry) + middleware = DeferredToolFilterMiddleware() + request = SimpleNamespace(tool_call={"name": "github_create_issue", "id": "call-1"}) + called = False + + async def handler(_request): + nonlocal called + called = True + return ToolMessage(content="executed", tool_call_id="call-1", name="github_create_issue") + 
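+        # (Same gate as the sync test above: the wrapped handler must never
+        # run for an unpromoted deferred tool; only the error ToolMessage is returned.)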
+        result = await middleware.awrap_tool_call(request, handler)
+
+        assert called is False
+        assert isinstance(result, ToolMessage)
+        assert result.status == "error"
+        assert result.tool_call_id == "call-1"
+        assert "tool_search" in result.content
+        assert "github_create_issue" in result.content
diff --git a/backend/tests/test_uploads_middleware_core_logic.py b/backend/tests/test_uploads_middleware_core_logic.py
index 1837c1286..6e39cda46 100644
--- a/backend/tests/test_uploads_middleware_core_logic.py
+++ b/backend/tests/test_uploads_middleware_core_logic.py
@@ -34,7 +34,9 @@ def _runtime(thread_id: str | None = THREAD_ID) -> MagicMock:
 
 
 def _uploads_dir(tmp_path: Path, thread_id: str = THREAD_ID) -> Path:
-    d = Paths(str(tmp_path)).sandbox_uploads_dir(thread_id)
+    from deerflow.runtime.user_context import get_effective_user_id
+
+    d = Paths(str(tmp_path)).sandbox_uploads_dir(thread_id, user_id=get_effective_user_id())
     d.mkdir(parents=True, exist_ok=True)
     return d
diff --git a/backend/tests/test_uploads_router.py b/backend/tests/test_uploads_router.py
index f305b998f..7f9b442d0 100644
--- a/backend/tests/test_uploads_router.py
+++ b/backend/tests/test_uploads_router.py
@@ -2,8 +2,10 @@
 import asyncio
 import stat
 from io import BytesIO
 from pathlib import Path
+from types import SimpleNamespace
 from unittest.mock import AsyncMock, MagicMock, patch
+from _router_auth_helpers import call_unwrapped
 from fastapi import UploadFile
 
 from app.gateway.routers import uploads
@@ -25,7 +27,7 @@ def test_upload_files_writes_thread_storage_and_skips_local_sandbox_sync(tmp_pat
         patch.object(uploads, "get_sandbox_provider", return_value=provider),
     ):
         file = UploadFile(filename="notes.txt", file=BytesIO(b"hello uploads"))
-        result = asyncio.run(uploads.upload_files("thread-local", files=[file]))
+        result = asyncio.run(call_unwrapped(uploads.upload_files, "thread-local", request=MagicMock(), files=[file], config=SimpleNamespace()))
 
     assert result.success is True
     assert len(result.files) == 1
@@ -48,7 +50,7 @@ def test_upload_files_skips_acquire_when_thread_data_is_mounted(tmp_path):
         patch.object(uploads, "get_sandbox_provider", return_value=provider),
     ):
         file = UploadFile(filename="notes.txt", file=BytesIO(b"hello uploads"))
-        result = asyncio.run(uploads.upload_files("thread-mounted", files=[file]))
+        result = asyncio.run(call_unwrapped(uploads.upload_files, "thread-mounted", request=MagicMock(), files=[file], config=SimpleNamespace()))
 
     assert result.success is True
     assert (thread_uploads_dir / "notes.txt").read_bytes() == b"hello uploads"
@@ -74,7 +76,7 @@
         patch.object(uploads, "convert_file_to_markdown", AsyncMock()) as convert_mock,
     ):
         file = UploadFile(filename="report.pdf", file=BytesIO(b"pdf-bytes"))
-        result = asyncio.run(uploads.upload_files("thread-local", files=[file]))
+        result = asyncio.run(call_unwrapped(uploads.upload_files, "thread-local", request=MagicMock(), files=[file], config=SimpleNamespace()))
 
     assert result.success is True
     assert len(result.files) == 1
@@ -107,7 +109,7 @@
         patch.object(uploads, "convert_file_to_markdown", AsyncMock(side_effect=fake_convert)),
     ):
         file = UploadFile(filename="report.pdf", file=BytesIO(b"pdf-bytes"))
-        result = asyncio.run(uploads.upload_files("thread-aio", files=[file]))
+        result = asyncio.run(call_unwrapped(uploads.upload_files, "thread-aio", request=MagicMock(), files=[file], config=SimpleNamespace()))
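+        # (call_unwrapped, the _router_auth_helpers helper, presumably calls the
+        # endpoint body beneath its auth decorator, mirroring the __wrapped__
+        # bypass used in test_suggestions_router.)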
assert result.success is True assert len(result.files) == 1 @@ -146,7 +148,7 @@ def test_upload_files_makes_non_local_files_sandbox_writable(tmp_path): patch.object(uploads, "_make_file_sandbox_writable") as make_writable, ): file = UploadFile(filename="report.pdf", file=BytesIO(b"pdf-bytes")) - result = asyncio.run(uploads.upload_files("thread-aio", files=[file])) + result = asyncio.run(call_unwrapped(uploads.upload_files, "thread-aio", request=MagicMock(), files=[file], config=SimpleNamespace())) assert result.success is True make_writable.assert_any_call(thread_uploads_dir / "report.pdf") @@ -170,7 +172,7 @@ def test_upload_files_does_not_adjust_permissions_for_local_sandbox(tmp_path): patch.object(uploads, "_make_file_sandbox_writable") as make_writable, ): file = UploadFile(filename="notes.txt", file=BytesIO(b"hello uploads")) - result = asyncio.run(uploads.upload_files("thread-local", files=[file])) + result = asyncio.run(call_unwrapped(uploads.upload_files, "thread-local", request=MagicMock(), files=[file], config=SimpleNamespace())) assert result.success is True make_writable.assert_not_called() @@ -221,13 +223,13 @@ def test_upload_files_rejects_dotdot_and_dot_filenames(tmp_path): # These filenames must be rejected outright for bad_name in ["..", "."]: file = UploadFile(filename=bad_name, file=BytesIO(b"data")) - result = asyncio.run(uploads.upload_files("thread-local", files=[file])) + result = asyncio.run(call_unwrapped(uploads.upload_files, "thread-local", request=MagicMock(), files=[file], config=SimpleNamespace())) assert result.success is True assert result.files == [], f"Expected no files for unsafe filename {bad_name!r}" # Path-traversal prefixes are stripped to the basename and accepted safely file = UploadFile(filename="../etc/passwd", file=BytesIO(b"data")) - result = asyncio.run(uploads.upload_files("thread-local", files=[file])) + result = asyncio.run(call_unwrapped(uploads.upload_files, "thread-local", request=MagicMock(), files=[file], config=SimpleNamespace())) assert result.success is True assert len(result.files) == 1 assert result.files[0]["filename"] == "passwd" @@ -243,7 +245,7 @@ def test_delete_uploaded_file_removes_generated_markdown_companion(tmp_path): (thread_uploads_dir / "report.md").write_text("converted", encoding="utf-8") with patch.object(uploads, "get_uploads_dir", return_value=thread_uploads_dir): - result = asyncio.run(uploads.delete_uploaded_file("thread-aio", "report.pdf")) + result = asyncio.run(call_unwrapped(uploads.delete_uploaded_file, "thread-aio", "report.pdf", request=MagicMock())) assert result == {"success": True, "message": "Deleted report.pdf"} assert not (thread_uploads_dir / "report.pdf").exists() @@ -251,16 +253,20 @@ def test_delete_uploaded_file_removes_generated_markdown_companion(tmp_path): def test_auto_convert_documents_enabled_defaults_to_false_on_config_errors(): - with patch.object(uploads, "get_app_config", side_effect=RuntimeError("boom")): - assert uploads._auto_convert_documents_enabled() is False + class BrokenConfig: + def __getattribute__(self, name): + if name == "uploads": + raise RuntimeError("boom") + return super().__getattribute__(name) + + assert uploads._auto_convert_documents_enabled(BrokenConfig()) is False def test_auto_convert_documents_enabled_reads_dict_backed_uploads_config(): cfg = MagicMock() cfg.uploads = {"auto_convert_documents": True} - with patch.object(uploads, "get_app_config", return_value=cfg): - assert uploads._auto_convert_documents_enabled() is True + assert 
uploads._auto_convert_documents_enabled(cfg) is True def test_auto_convert_documents_enabled_accepts_boolean_and_string_truthy_values(): @@ -276,11 +282,7 @@ def test_auto_convert_documents_enabled_accepts_boolean_and_string_truthy_values string_false_cfg = MagicMock() string_false_cfg.uploads = MagicMock(auto_convert_documents="false") - with patch.object(uploads, "get_app_config", return_value=false_cfg): - assert uploads._auto_convert_documents_enabled() is False - with patch.object(uploads, "get_app_config", return_value=true_cfg): - assert uploads._auto_convert_documents_enabled() is True - with patch.object(uploads, "get_app_config", return_value=string_true_cfg): - assert uploads._auto_convert_documents_enabled() is True - with patch.object(uploads, "get_app_config", return_value=string_false_cfg): - assert uploads._auto_convert_documents_enabled() is False + assert uploads._auto_convert_documents_enabled(false_cfg) is False + assert uploads._auto_convert_documents_enabled(true_cfg) is True + assert uploads._auto_convert_documents_enabled(string_true_cfg) is True + assert uploads._auto_convert_documents_enabled(string_false_cfg) is False diff --git a/backend/tests/test_user_context.py b/backend/tests/test_user_context.py new file mode 100644 index 000000000..111ffb679 --- /dev/null +++ b/backend/tests/test_user_context.py @@ -0,0 +1,111 @@ +"""Tests for runtime.user_context — contextvar three-state semantics. + +These tests opt out of the autouse contextvar fixture (added in +commit 6) because they explicitly test the cases where the contextvar +is set or unset. +""" + +from types import SimpleNamespace + +import pytest + +from deerflow.runtime.user_context import ( + DEFAULT_USER_ID, + CurrentUser, + get_current_user, + get_effective_user_id, + require_current_user, + reset_current_user, + set_current_user, +) + + +@pytest.mark.no_auto_user +def test_default_is_none(): + """Before any set, contextvar returns None.""" + assert get_current_user() is None + + +@pytest.mark.no_auto_user +def test_set_and_reset_roundtrip(): + """set_current_user returns a token that reset restores.""" + user = SimpleNamespace(id="user-1") + token = set_current_user(user) + try: + assert get_current_user() is user + finally: + reset_current_user(token) + assert get_current_user() is None + + +@pytest.mark.no_auto_user +def test_require_current_user_raises_when_unset(): + """require_current_user raises RuntimeError if contextvar is unset.""" + assert get_current_user() is None + with pytest.raises(RuntimeError, match="without user context"): + require_current_user() + + +@pytest.mark.no_auto_user +def test_require_current_user_returns_user_when_set(): + """require_current_user returns the user when contextvar is set.""" + user = SimpleNamespace(id="user-2") + token = set_current_user(user) + try: + assert require_current_user() is user + finally: + reset_current_user(token) + + +@pytest.mark.no_auto_user +def test_protocol_accepts_duck_typed(): + """CurrentUser is a runtime_checkable Protocol matching any .id-bearing object.""" + user = SimpleNamespace(id="user-3") + assert isinstance(user, CurrentUser) + + +@pytest.mark.no_auto_user +def test_protocol_rejects_no_id(): + """Objects without .id do not satisfy CurrentUser Protocol.""" + not_a_user = SimpleNamespace(email="no-id@example.com") + assert not isinstance(not_a_user, CurrentUser) + + +# --------------------------------------------------------------------------- +# get_effective_user_id / DEFAULT_USER_ID tests +# 
--------------------------------------------------------------------------- + + +def test_default_user_id_is_default(): + assert DEFAULT_USER_ID == "default" + + +@pytest.mark.no_auto_user +def test_effective_user_id_returns_default_when_no_user(): + """No user in context -> fallback to DEFAULT_USER_ID.""" + assert get_effective_user_id() == "default" + + +@pytest.mark.no_auto_user +def test_effective_user_id_returns_user_id_when_set(): + user = SimpleNamespace(id="u-abc-123") + token = set_current_user(user) + try: + assert get_effective_user_id() == "u-abc-123" + finally: + reset_current_user(token) + + +@pytest.mark.no_auto_user +def test_effective_user_id_coerces_to_str(): + """User.id might be a UUID object; must come back as str.""" + import uuid + + uid = uuid.uuid4() + + user = SimpleNamespace(id=uid) + token = set_current_user(user) + try: + assert get_effective_user_id() == str(uid) + finally: + reset_current_user(token) diff --git a/backend/tests/test_view_image_tool.py b/backend/tests/test_view_image_tool.py new file mode 100644 index 000000000..eb7db890c --- /dev/null +++ b/backend/tests/test_view_image_tool.py @@ -0,0 +1,164 @@ +import base64 +import importlib +import os +from pathlib import Path +from types import SimpleNamespace + +import pytest + +from deerflow.tools.builtins.view_image_tool import view_image_tool + +view_image_module = importlib.import_module("deerflow.tools.builtins.view_image_tool") + +PNG_BYTES = base64.b64decode("iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==") + + +def _make_thread_data(tmp_path: Path) -> dict[str, str]: + user_data = tmp_path / "threads" / "thread-1" / "user-data" + workspace = user_data / "workspace" + uploads = user_data / "uploads" + outputs = user_data / "outputs" + for directory in (workspace, uploads, outputs): + directory.mkdir(parents=True) + + return { + "workspace_path": str(workspace), + "uploads_path": str(uploads), + "outputs_path": str(outputs), + } + + +def _make_runtime(thread_data: dict[str, str]) -> SimpleNamespace: + return SimpleNamespace( + state={"thread_data": thread_data}, + context={"thread_id": "thread-1"}, + config={}, + ) + + +def _message_content(result) -> str: + return result.update["messages"][0].content + + +def test_view_image_rejects_external_absolute_path(tmp_path: Path) -> None: + thread_data = _make_thread_data(tmp_path) + outside_image = tmp_path / "outside.png" + outside_image.write_bytes(PNG_BYTES) + + result = view_image_tool.func( + runtime=_make_runtime(thread_data), + image_path=str(outside_image), + tool_call_id="tc-external", + ) + + assert "Only image paths under /mnt/user-data" in _message_content(result) + assert "viewed_images" not in result.update + + +def test_view_image_reads_virtual_uploads_path(tmp_path: Path) -> None: + thread_data = _make_thread_data(tmp_path) + image_path = Path(thread_data["uploads_path"]) / "sample.png" + image_path.write_bytes(PNG_BYTES) + + result = view_image_tool.func( + runtime=_make_runtime(thread_data), + image_path="/mnt/user-data/uploads/sample.png", + tool_call_id="tc-uploads", + ) + + assert _message_content(result) == "Successfully read image" + viewed_image = result.update["viewed_images"]["/mnt/user-data/uploads/sample.png"] + assert viewed_image["base64"] == base64.b64encode(PNG_BYTES).decode("utf-8") + assert viewed_image["mime_type"] == "image/png" + + +def test_view_image_rejects_spoofed_extension(tmp_path: Path) -> None: + thread_data = _make_thread_data(tmp_path) + image_path = 
Path(thread_data["uploads_path"]) / "not-really.png" + image_path.write_bytes(b"not an image") + + result = view_image_tool.func( + runtime=_make_runtime(thread_data), + image_path="/mnt/user-data/uploads/not-really.png", + tool_call_id="tc-spoofed", + ) + + assert "contents do not match" in _message_content(result) + assert "viewed_images" not in result.update + + +def test_view_image_rejects_mismatched_magic_bytes(tmp_path: Path) -> None: + thread_data = _make_thread_data(tmp_path) + image_path = Path(thread_data["uploads_path"]) / "jpeg-named-png.png" + image_path.write_bytes(b"\xff\xd8\xff\xe0fake-jpeg") + + result = view_image_tool.func( + runtime=_make_runtime(thread_data), + image_path="/mnt/user-data/uploads/jpeg-named-png.png", + tool_call_id="tc-mismatch", + ) + + assert "file extension indicates image/png" in _message_content(result) + assert "viewed_images" not in result.update + + +def test_view_image_rejects_oversized_image(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: + thread_data = _make_thread_data(tmp_path) + image_path = Path(thread_data["uploads_path"]) / "sample.png" + image_path.write_bytes(PNG_BYTES) + monkeypatch.setattr(view_image_module, "_MAX_IMAGE_BYTES", len(PNG_BYTES) - 1) + + result = view_image_tool.func( + runtime=_make_runtime(thread_data), + image_path="/mnt/user-data/uploads/sample.png", + tool_call_id="tc-oversized", + ) + + assert "Image file is too large" in _message_content(result) + assert "viewed_images" not in result.update + + +def test_view_image_sanitizes_read_errors(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: + thread_data = _make_thread_data(tmp_path) + image_path = Path(thread_data["uploads_path"]) / "sample.png" + image_path.write_bytes(PNG_BYTES) + + def _open(*args, **kwargs): + raise PermissionError(f"permission denied: {image_path}") + + monkeypatch.setattr("builtins.open", _open) + + result = view_image_tool.func( + runtime=_make_runtime(thread_data), + image_path="/mnt/user-data/uploads/sample.png", + tool_call_id="tc-read-error", + ) + + message = _message_content(result) + assert "Error reading image file" in message + assert str(image_path) not in message + assert str(Path(thread_data["uploads_path"])) not in message + assert "/mnt/user-data/uploads/sample.png" in message + assert "viewed_images" not in result.update + + +@pytest.mark.skipif(os.name == "nt", reason="symlink semantics differ on Windows") +def test_view_image_rejects_uploads_symlink_escape(tmp_path: Path) -> None: + thread_data = _make_thread_data(tmp_path) + outside_image = tmp_path / "outside-target.png" + outside_image.write_bytes(PNG_BYTES) + + link_path = Path(thread_data["uploads_path"]) / "escape.png" + try: + link_path.symlink_to(outside_image) + except OSError as exc: + pytest.skip(f"symlink creation failed: {exc}") + + result = view_image_tool.func( + runtime=_make_runtime(thread_data), + image_path="/mnt/user-data/uploads/escape.png", + tool_call_id="tc-symlink", + ) + + assert "path traversal" in _message_content(result) + assert "viewed_images" not in result.update diff --git a/backend/uv.lock b/backend/uv.lock index a42aa17b7..378bbb842 100644 --- a/backend/uv.lock +++ b/backend/uv.lock @@ -18,28 +18,28 @@ members = [ [[package]] name = "agent-client-protocol" -version = "0.8.1" +version = "0.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/1b/7b/7cdac86db388809d9e3bc58cac88cc7dfa49b7615b98fab304a828cd7f8a/agent_client_protocol-0.8.1.tar.gz", hash = "sha256:1bbf15663bf51f64942597f638e32a6284c5da918055d9672d3510e965143dbd", size = 68866, upload-time = "2026-02-13T15:34:54.567Z" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/13/3b893421369767e7043cc115d6ef0df417c298b84563be3a12df0416158d/agent_client_protocol-0.9.0.tar.gz", hash = "sha256:f744c48ab9af0f0b4452e5ab5498d61bcab97c26dbe7d6feec5fd36de49be30b", size = 71853, upload-time = "2026-03-26T01:21:00.379Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4b/f3/219eeca0ad4a20843d4b9eaac5532f87018b9d25730a62a16f54f6c52d1a/agent_client_protocol-0.8.1-py3-none-any.whl", hash = "sha256:9421a11fd435b4831660272d169c3812d553bb7247049c138c3ca127e4b8af8e", size = 54529, upload-time = "2026-02-13T15:34:53.344Z" }, + { url = "https://files.pythonhosted.org/packages/8f/ed/c284543c08aa443a4ef2c8bd120be51da8433dd174c01749b5d87c333f22/agent_client_protocol-0.9.0-py3-none-any.whl", hash = "sha256:06911500b51d8cb69112544e2be01fc5e7db39ef88fecbc3848c5c6f194798ee", size = 56850, upload-time = "2026-03-26T01:20:59.252Z" }, ] [[package]] name = "agent-sandbox" -version = "0.0.19" +version = "0.0.30" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx", extra = ["socks"] }, { name = "pydantic" }, { name = "volcengine-python-sdk" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b0/21/62d527b1c671ad82f8f11b4caa585b85e829e5a23960ee83facae49da69b/agent_sandbox-0.0.19.tar.gz", hash = "sha256:724b40d7a20eedd1da67f254d02705a794d0835ebc30c9b5ca8aa148accf3bbd", size = 68114, upload-time = "2025-12-11T08:24:29.174Z" } +sdist = { url = "https://files.pythonhosted.org/packages/32/5c/94d924db07da1e4f0925dd593939e652bb45d60cc749756bcce2d302817d/agent_sandbox-0.0.30.tar.gz", hash = "sha256:29677beca8fabf9ee1ec7c0e5637d15ba37a7e1f58cc8deb82611772fdec43bb", size = 111818, upload-time = "2026-03-24T09:38:19.299Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/19/8c8f3d786ea65fb8a40ba7ac7e5fa0dd972fba413421a139cd6ca3679fe2/agent_sandbox-0.0.19-py2.py3-none-any.whl", hash = "sha256:063b6ffe7d035d84289e60339cbb0708169efe89f9d322e94c071ae2ee5bec5a", size = 152276, upload-time = "2025-12-11T08:24:27.682Z" }, + { url = "https://files.pythonhosted.org/packages/5b/bc/b83d625cf8cb9611c379b452d60da47a26fb7ccfa6c56c63cde4e45214c2/agent_sandbox-0.0.30-py2.py3-none-any.whl", hash = "sha256:bbcca45a202abb7cd01162473f01f3218de5037745ef4fa539e617a3cc973896", size = 243633, upload-time = "2026-03-24T09:38:17.8Z" }, ] [[package]] @@ -53,7 +53,7 @@ wheels = [ [[package]] name = "aiohttp" -version = "3.13.4" +version = "3.13.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, @@ -64,76 +64,76 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/4a/064321452809dae953c1ed6e017504e72551a26b6f5708a5a80e4bf556ff/aiohttp-3.13.4.tar.gz", hash = "sha256:d97a6d09c66087890c2ab5d49069e1e570583f7ac0314ecf98294c1b6aaebd38", size = 7859748, upload-time = "2026-03-28T17:19:40.6Z" } +sdist = { url = "https://files.pythonhosted.org/packages/77/9a/152096d4808df8e4268befa55fba462f440f14beab85e8ad9bf990516918/aiohttp-3.13.5.tar.gz", hash = "sha256:9d98cc980ecc96be6eb4c1994ce35d28d8b1f5e5208a23b421187d1209dbb7d1", size = 7858271, upload-time = "2026-03-31T22:01:03.343Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/1e/bd/ede278648914cabbabfdf95e436679b5d4156e417896a9b9f4587169e376/aiohttp-3.13.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ee62d4471ce86b108b19c3364db4b91180d13fe3510144872d6bad5401957360", size = 752158, upload-time = "2026-03-28T17:16:06.901Z" }, - { url = "https://files.pythonhosted.org/packages/90/de/581c053253c07b480b03785196ca5335e3c606a37dc73e95f6527f1591fe/aiohttp-3.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c0fd8f41b54b58636402eb493afd512c23580456f022c1ba2db0f810c959ed0d", size = 501037, upload-time = "2026-03-28T17:16:08.82Z" }, - { url = "https://files.pythonhosted.org/packages/fa/f9/a5ede193c08f13cc42c0a5b50d1e246ecee9115e4cf6e900d8dbd8fd6acb/aiohttp-3.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4baa48ce49efd82d6b1a0be12d6a36b35e5594d1dd42f8bfba96ea9f8678b88c", size = 501556, upload-time = "2026-03-28T17:16:10.63Z" }, - { url = "https://files.pythonhosted.org/packages/d6/10/88ff67cd48a6ec36335b63a640abe86135791544863e0cfe1f065d6cef7a/aiohttp-3.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d738ebab9f71ee652d9dbd0211057690022201b11197f9a7324fd4dba128aa97", size = 1757314, upload-time = "2026-03-28T17:16:12.498Z" }, - { url = "https://files.pythonhosted.org/packages/8b/15/fdb90a5cf5a1f52845c276e76298c75fbbcc0ac2b4a86551906d54529965/aiohttp-3.13.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0ce692c3468fa831af7dceed52edf51ac348cebfc8d3feb935927b63bd3e8576", size = 1731819, upload-time = "2026-03-28T17:16:14.558Z" }, - { url = "https://files.pythonhosted.org/packages/ec/df/28146785a007f7820416be05d4f28cc207493efd1e8c6c1068e9bdc29198/aiohttp-3.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8e08abcfe752a454d2cb89ff0c08f2d1ecd057ae3e8cc6d84638de853530ebab", size = 1793279, upload-time = "2026-03-28T17:16:16.594Z" }, - { url = "https://files.pythonhosted.org/packages/10/47/689c743abf62ea7a77774d5722f220e2c912a77d65d368b884d9779ef41b/aiohttp-3.13.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5977f701b3fff36367a11087f30ea73c212e686d41cd363c50c022d48b011d8d", size = 1891082, upload-time = "2026-03-28T17:16:18.71Z" }, - { url = "https://files.pythonhosted.org/packages/b0/b6/f7f4f318c7e58c23b761c9b13b9a3c9b394e0f9d5d76fbc6622fa98509f6/aiohttp-3.13.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:54203e10405c06f8b6020bd1e076ae0fe6c194adcee12a5a78af3ffa3c57025e", size = 1773938, upload-time = "2026-03-28T17:16:21.125Z" }, - { url = "https://files.pythonhosted.org/packages/aa/06/f207cb3121852c989586a6fc16ff854c4fcc8651b86c5d3bd1fc83057650/aiohttp-3.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:358a6af0145bc4dda037f13167bef3cce54b132087acc4c295c739d05d16b1c3", size = 1579548, upload-time = "2026-03-28T17:16:23.588Z" }, - { url = "https://files.pythonhosted.org/packages/6c/58/e1289661a32161e24c1fe479711d783067210d266842523752869cc1d9c2/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:898ea1850656d7d61832ef06aa9846ab3ddb1621b74f46de78fbc5e1a586ba83", size = 1714669, upload-time = "2026-03-28T17:16:25.713Z" }, - { url = "https://files.pythonhosted.org/packages/96/0a/3e86d039438a74a86e6a948a9119b22540bae037d6ba317a042ae3c22711/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = 
"sha256:7bc30cceb710cf6a44e9617e43eebb6e3e43ad855a34da7b4b6a73537d8a6763", size = 1754175, upload-time = "2026-03-28T17:16:28.18Z" }, - { url = "https://files.pythonhosted.org/packages/f4/30/e717fc5df83133ba467a560b6d8ef20197037b4bb5d7075b90037de1018e/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4a31c0c587a8a038f19a4c7e60654a6c899c9de9174593a13e7cc6e15ff271f9", size = 1762049, upload-time = "2026-03-28T17:16:30.941Z" }, - { url = "https://files.pythonhosted.org/packages/e4/28/8f7a2d4492e336e40005151bdd94baf344880a4707573378579f833a64c1/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2062f675f3fe6e06d6113eb74a157fb9df58953ffed0cdb4182554b116545758", size = 1570861, upload-time = "2026-03-28T17:16:32.953Z" }, - { url = "https://files.pythonhosted.org/packages/78/45/12e1a3d0645968b1c38de4b23fdf270b8637735ea057d4f84482ff918ad9/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d1ba8afb847ff80626d5e408c1fdc99f942acc877d0702fe137015903a220a9", size = 1790003, upload-time = "2026-03-28T17:16:35.468Z" }, - { url = "https://files.pythonhosted.org/packages/eb/0f/60374e18d590de16dcb39d6ff62f39c096c1b958e6f37727b5870026ea30/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b08149419994cdd4d5eecf7fd4bc5986b5a9380285bcd01ab4c0d6bfca47b79d", size = 1737289, upload-time = "2026-03-28T17:16:38.187Z" }, - { url = "https://files.pythonhosted.org/packages/02/bf/535e58d886cfbc40a8b0013c974afad24ef7632d645bca0b678b70033a60/aiohttp-3.13.4-cp312-cp312-win32.whl", hash = "sha256:fc432f6a2c4f720180959bc19aa37259651c1a4ed8af8afc84dd41c60f15f791", size = 434185, upload-time = "2026-03-28T17:16:40.735Z" }, - { url = "https://files.pythonhosted.org/packages/1e/1a/d92e3325134ebfff6f4069f270d3aac770d63320bd1fcd0eca023e74d9a8/aiohttp-3.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:6148c9ae97a3e8bff9a1fc9c757fa164116f86c100468339730e717590a3fb77", size = 461285, upload-time = "2026-03-28T17:16:42.713Z" }, - { url = "https://files.pythonhosted.org/packages/e3/ac/892f4162df9b115b4758d615f32ec63d00f3084c705ff5526630887b9b42/aiohttp-3.13.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:63dd5e5b1e43b8fb1e91b79b7ceba1feba588b317d1edff385084fcc7a0a4538", size = 745744, upload-time = "2026-03-28T17:16:44.67Z" }, - { url = "https://files.pythonhosted.org/packages/97/a9/c5b87e4443a2f0ea88cb3000c93a8fdad1ee63bffc9ded8d8c8e0d66efc6/aiohttp-3.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:746ac3cc00b5baea424dacddea3ec2c2702f9590de27d837aa67004db1eebc6e", size = 498178, upload-time = "2026-03-28T17:16:46.766Z" }, - { url = "https://files.pythonhosted.org/packages/94/42/07e1b543a61250783650df13da8ddcdc0d0a5538b2bd15cef6e042aefc61/aiohttp-3.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bda8f16ea99d6a6705e5946732e48487a448be874e54a4f73d514660ff7c05d3", size = 498331, upload-time = "2026-03-28T17:16:48.9Z" }, - { url = "https://files.pythonhosted.org/packages/20/d6/492f46bf0328534124772d0cf58570acae5b286ea25006900650f69dae0e/aiohttp-3.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b061e7b5f840391e3f64d0ddf672973e45c4cfff7a0feea425ea24e51530fc2", size = 1744414, upload-time = "2026-03-28T17:16:50.968Z" }, - { url = "https://files.pythonhosted.org/packages/e2/4d/e02627b2683f68051246215d2d62b2d2f249ff7a285e7a858dc47d6b6a14/aiohttp-3.13.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:b252e8d5cd66184b570d0d010de742736e8a4fab22c58299772b0c5a466d4b21", size = 1719226, upload-time = "2026-03-28T17:16:53.173Z" }, - { url = "https://files.pythonhosted.org/packages/7b/6c/5d0a3394dd2b9f9aeba6e1b6065d0439e4b75d41f1fb09a3ec010b43552b/aiohttp-3.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:20af8aad61d1803ff11152a26146d8d81c266aa8c5aa9b4504432abb965c36a0", size = 1782110, upload-time = "2026-03-28T17:16:55.362Z" }, - { url = "https://files.pythonhosted.org/packages/0d/2d/c20791e3437700a7441a7edfb59731150322424f5aadf635602d1d326101/aiohttp-3.13.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:13a5cc924b59859ad2adb1478e31f410a7ed46e92a2a619d6d1dd1a63c1a855e", size = 1884809, upload-time = "2026-03-28T17:16:57.734Z" }, - { url = "https://files.pythonhosted.org/packages/c8/94/d99dbfbd1924a87ef643833932eb2a3d9e5eee87656efea7d78058539eff/aiohttp-3.13.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:534913dfb0a644d537aebb4123e7d466d94e3be5549205e6a31f72368980a81a", size = 1764938, upload-time = "2026-03-28T17:17:00.221Z" }, - { url = "https://files.pythonhosted.org/packages/49/61/3ce326a1538781deb89f6cf5e094e2029cd308ed1e21b2ba2278b08426f6/aiohttp-3.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:320e40192a2dcc1cf4b5576936e9652981ab596bf81eb309535db7e2f5b5672f", size = 1570697, upload-time = "2026-03-28T17:17:02.985Z" }, - { url = "https://files.pythonhosted.org/packages/b6/77/4ab5a546857bb3028fbaf34d6eea180267bdab022ee8b1168b1fcde4bfdd/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9e587fcfce2bcf06526a43cb705bdee21ac089096f2e271d75de9c339db3100c", size = 1702258, upload-time = "2026-03-28T17:17:05.28Z" }, - { url = "https://files.pythonhosted.org/packages/79/63/d8f29021e39bc5af8e5d5e9da1b07976fb9846487a784e11e4f4eeda4666/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9eb9c2eea7278206b5c6c1441fdd9dc420c278ead3f3b2cc87f9b693698cc500", size = 1740287, upload-time = "2026-03-28T17:17:07.712Z" }, - { url = "https://files.pythonhosted.org/packages/55/3a/cbc6b3b124859a11bc8055d3682c26999b393531ef926754a3445b99dfef/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:29be00c51972b04bf9d5c8f2d7f7314f48f96070ca40a873a53056e652e805f7", size = 1753011, upload-time = "2026-03-28T17:17:10.053Z" }, - { url = "https://files.pythonhosted.org/packages/e0/30/836278675205d58c1368b21520eab9572457cf19afd23759216c04483048/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:90c06228a6c3a7c9f776fe4fc0b7ff647fffd3bed93779a6913c804ae00c1073", size = 1566359, upload-time = "2026-03-28T17:17:12.433Z" }, - { url = "https://files.pythonhosted.org/packages/50/b4/8032cc9b82d17e4277704ba30509eaccb39329dc18d6a35f05e424439e32/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:a533ec132f05fd9a1d959e7f34184cd7d5e8511584848dab85faefbaac573069", size = 1785537, upload-time = "2026-03-28T17:17:14.721Z" }, - { url = "https://files.pythonhosted.org/packages/17/7d/5873e98230bde59f493bf1f7c3e327486a4b5653fa401144704df5d00211/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1c946f10f413836f82ea4cfb90200d2a59578c549f00857e03111cf45ad01ca5", size = 1740752, upload-time = "2026-03-28T17:17:17.387Z" }, - { url = 
"https://files.pythonhosted.org/packages/7b/f2/13e46e0df051494d7d3c68b7f72d071f48c384c12716fc294f75d5b1a064/aiohttp-3.13.4-cp313-cp313-win32.whl", hash = "sha256:48708e2706106da6967eff5908c78ca3943f005ed6bcb75da2a7e4da94ef8c70", size = 433187, upload-time = "2026-03-28T17:17:19.523Z" }, - { url = "https://files.pythonhosted.org/packages/ea/c0/649856ee655a843c8f8664592cfccb73ac80ede6a8c8db33a25d810c12db/aiohttp-3.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:74a2eb058da44fa3a877a49e2095b591d4913308bb424c418b77beb160c55ce3", size = 459778, upload-time = "2026-03-28T17:17:21.964Z" }, - { url = "https://files.pythonhosted.org/packages/6d/29/6657cc37ae04cacc2dbf53fb730a06b6091cc4cbe745028e047c53e6d840/aiohttp-3.13.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:e0a2c961fc92abeff61d6444f2ce6ad35bb982db9fc8ff8a47455beacf454a57", size = 749363, upload-time = "2026-03-28T17:17:24.044Z" }, - { url = "https://files.pythonhosted.org/packages/90/7f/30ccdf67ca3d24b610067dc63d64dcb91e5d88e27667811640644aa4a85d/aiohttp-3.13.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:153274535985a0ff2bff1fb6c104ed547cec898a09213d21b0f791a44b14d933", size = 499317, upload-time = "2026-03-28T17:17:26.199Z" }, - { url = "https://files.pythonhosted.org/packages/93/13/e372dd4e68ad04ee25dafb050c7f98b0d91ea643f7352757e87231102555/aiohttp-3.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:351f3171e2458da3d731ce83f9e6b9619e325c45cbd534c7759750cabf453ad7", size = 500477, upload-time = "2026-03-28T17:17:28.279Z" }, - { url = "https://files.pythonhosted.org/packages/e5/fe/ee6298e8e586096fb6f5eddd31393d8544f33ae0792c71ecbb4c2bef98ac/aiohttp-3.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f989ac8bc5595ff761a5ccd32bdb0768a117f36dd1504b1c2c074ed5d3f4df9c", size = 1737227, upload-time = "2026-03-28T17:17:30.587Z" }, - { url = "https://files.pythonhosted.org/packages/b0/b9/a7a0463a09e1a3fe35100f74324f23644bfc3383ac5fd5effe0722a5f0b7/aiohttp-3.13.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d36fc1709110ec1e87a229b201dd3ddc32aa01e98e7868083a794609b081c349", size = 1694036, upload-time = "2026-03-28T17:17:33.29Z" }, - { url = "https://files.pythonhosted.org/packages/57/7c/8972ae3fb7be00a91aee6b644b2a6a909aedb2c425269a3bfd90115e6f8f/aiohttp-3.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42adaeea83cbdf069ab94f5103ce0787c21fb1a0153270da76b59d5578302329", size = 1786814, upload-time = "2026-03-28T17:17:36.035Z" }, - { url = "https://files.pythonhosted.org/packages/93/01/c81e97e85c774decbaf0d577de7d848934e8166a3a14ad9f8aa5be329d28/aiohttp-3.13.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:92deb95469928cc41fd4b42a95d8012fa6df93f6b1c0a83af0ffbc4a5e218cde", size = 1866676, upload-time = "2026-03-28T17:17:38.441Z" }, - { url = "https://files.pythonhosted.org/packages/5a/5f/5b46fe8694a639ddea2cd035bf5729e4677ea882cb251396637e2ef1590d/aiohttp-3.13.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0c0c7c07c4257ef3a1df355f840bc62d133bcdef5c1c5ba75add3c08553e2eed", size = 1740842, upload-time = "2026-03-28T17:17:40.783Z" }, - { url = "https://files.pythonhosted.org/packages/20/a2/0d4b03d011cca6b6b0acba8433193c1e484efa8d705ea58295590fe24203/aiohttp-3.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:f062c45de8a1098cb137a1898819796a2491aec4e637a06b03f149315dff4d8f", size = 1566508, upload-time = "2026-03-28T17:17:43.235Z" }, - { url = "https://files.pythonhosted.org/packages/98/17/e689fd500da52488ec5f889effd6404dece6a59de301e380f3c64f167beb/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:76093107c531517001114f0ebdb4f46858ce818590363e3e99a4a2280334454a", size = 1700569, upload-time = "2026-03-28T17:17:46.165Z" }, - { url = "https://files.pythonhosted.org/packages/d8/0d/66402894dbcf470ef7db99449e436105ea862c24f7ea4c95c683e635af35/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:6f6ec32162d293b82f8b63a16edc80769662fbd5ae6fbd4936d3206a2c2cc63b", size = 1707407, upload-time = "2026-03-28T17:17:48.825Z" }, - { url = "https://files.pythonhosted.org/packages/2f/eb/af0ab1a3650092cbd8e14ef29e4ab0209e1460e1c299996c3f8288b3f1ff/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5903e2db3d202a00ad9f0ec35a122c005e85d90c9836ab4cda628f01edf425e2", size = 1752214, upload-time = "2026-03-28T17:17:51.206Z" }, - { url = "https://files.pythonhosted.org/packages/5a/bf/72326f8a98e4c666f292f03c385545963cc65e358835d2a7375037a97b57/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2d5bea57be7aca98dbbac8da046d99b5557c5cf4e28538c4c786313078aca09e", size = 1562162, upload-time = "2026-03-28T17:17:53.634Z" }, - { url = "https://files.pythonhosted.org/packages/67/9f/13b72435f99151dd9a5469c96b3b5f86aa29b7e785ca7f35cf5e538f74c0/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:bcf0c9902085976edc0232b75006ef38f89686901249ce14226b6877f88464fb", size = 1768904, upload-time = "2026-03-28T17:17:55.991Z" }, - { url = "https://files.pythonhosted.org/packages/18/bc/28d4970e7d5452ac7776cdb5431a1164a0d9cf8bd2fffd67b4fb463aa56d/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3295f98bfeed2e867cab588f2a146a9db37a85e3ae9062abf46ba062bd29165", size = 1723378, upload-time = "2026-03-28T17:17:58.348Z" }, - { url = "https://files.pythonhosted.org/packages/53/74/b32458ca1a7f34d65bdee7aef2036adbe0438123d3d53e2b083c453c24dd/aiohttp-3.13.4-cp314-cp314-win32.whl", hash = "sha256:a598a5c5767e1369d8f5b08695cab1d8160040f796c4416af76fd773d229b3c9", size = 438711, upload-time = "2026-03-28T17:18:00.728Z" }, - { url = "https://files.pythonhosted.org/packages/40/b2/54b487316c2df3e03a8f3435e9636f8a81a42a69d942164830d193beb56a/aiohttp-3.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:c555db4bc7a264bead5a7d63d92d41a1122fcd39cc62a4db815f45ad46f9c2c8", size = 464977, upload-time = "2026-03-28T17:18:03.367Z" }, - { url = "https://files.pythonhosted.org/packages/47/fb/e41b63c6ce71b07a59243bb8f3b457ee0c3402a619acb9d2c0d21ef0e647/aiohttp-3.13.4-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45abbbf09a129825d13c18c7d3182fecd46d9da3cfc383756145394013604ac1", size = 781549, upload-time = "2026-03-28T17:18:05.779Z" }, - { url = "https://files.pythonhosted.org/packages/97/53/532b8d28df1e17e44c4d9a9368b78dcb6bf0b51037522136eced13afa9e8/aiohttp-3.13.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:74c80b2bc2c2adb7b3d1941b2b60701ee2af8296fc8aad8b8bc48bc25767266c", size = 514383, upload-time = "2026-03-28T17:18:08.096Z" }, - { url = "https://files.pythonhosted.org/packages/1b/1f/62e5d400603e8468cd635812d99cb81cfdc08127a3dc474c647615f31339/aiohttp-3.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c97989ae40a9746650fa196894f317dafc12227c808c774929dda0ff873a5954", size = 518304, upload-time = 
"2026-03-28T17:18:10.642Z" }, - { url = "https://files.pythonhosted.org/packages/90/57/2326b37b10896447e3c6e0cbef4fe2486d30913639a5cfd1332b5d870f82/aiohttp-3.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dae86be9811493f9990ef44fff1685f5c1a3192e9061a71a109d527944eed551", size = 1893433, upload-time = "2026-03-28T17:18:13.121Z" }, - { url = "https://files.pythonhosted.org/packages/d2/b4/a24d82112c304afdb650167ef2fe190957d81cbddac7460bedd245f765aa/aiohttp-3.13.4-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1db491abe852ca2fa6cc48a3341985b0174b3741838e1341b82ac82c8bd9e871", size = 1755901, upload-time = "2026-03-28T17:18:16.21Z" }, - { url = "https://files.pythonhosted.org/packages/9e/2d/0883ef9d878d7846287f036c162a951968f22aabeef3ac97b0bea6f76d5d/aiohttp-3.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0e5d701c0aad02a7dce72eef6b93226cf3734330f1a31d69ebbf69f33b86666e", size = 1876093, upload-time = "2026-03-28T17:18:18.703Z" }, - { url = "https://files.pythonhosted.org/packages/ad/52/9204bb59c014869b71971addad6778f005daa72a96eed652c496789d7468/aiohttp-3.13.4-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8ac32a189081ae0a10ba18993f10f338ec94341f0d5df8fff348043962f3c6f8", size = 1970815, upload-time = "2026-03-28T17:18:21.858Z" }, - { url = "https://files.pythonhosted.org/packages/d6/b5/e4eb20275a866dde0f570f411b36c6b48f7b53edfe4f4071aa1b0728098a/aiohttp-3.13.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98e968cdaba43e45c73c3f306fca418c8009a957733bac85937c9f9cf3f4de27", size = 1816223, upload-time = "2026-03-28T17:18:24.729Z" }, - { url = "https://files.pythonhosted.org/packages/d8/23/e98075c5bb146aa61a1239ee1ac7714c85e814838d6cebbe37d3fe19214a/aiohttp-3.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca114790c9144c335d538852612d3e43ea0f075288f4849cf4b05d6cd2238ce7", size = 1649145, upload-time = "2026-03-28T17:18:27.269Z" }, - { url = "https://files.pythonhosted.org/packages/d6/c1/7bad8be33bb06c2bb224b6468874346026092762cbec388c3bdb65a368ee/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ea2e071661ba9cfe11eabbc81ac5376eaeb3061f6e72ec4cc86d7cdd1ffbdbbb", size = 1816562, upload-time = "2026-03-28T17:18:29.847Z" }, - { url = "https://files.pythonhosted.org/packages/5c/10/c00323348695e9a5e316825969c88463dcc24c7e9d443244b8a2c9cf2eae/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:34e89912b6c20e0fd80e07fa401fd218a410aa1ce9f1c2f1dad6db1bd0ce0927", size = 1800333, upload-time = "2026-03-28T17:18:32.269Z" }, - { url = "https://files.pythonhosted.org/packages/84/43/9b2147a1df3559f49bd723e22905b46a46c068a53adb54abdca32c4de180/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0e217cf9f6a42908c52b46e42c568bd57adc39c9286ced31aaace614b6087965", size = 1820617, upload-time = "2026-03-28T17:18:35.238Z" }, - { url = "https://files.pythonhosted.org/packages/a9/7f/b3481a81e7a586d02e99387b18c6dafff41285f6efd3daa2124c01f87eae/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:0c296f1221e21ba979f5ac1964c3b78cfde15c5c5f855ffd2caab337e9cd9182", size = 1643417, upload-time = "2026-03-28T17:18:37.949Z" }, - { url = 
"https://files.pythonhosted.org/packages/8f/72/07181226bc99ce1124e0f89280f5221a82d3ae6a6d9d1973ce429d48e52b/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d99a9d168ebaffb74f36d011750e490085ac418f4db926cce3989c8fe6cb6b1b", size = 1849286, upload-time = "2026-03-28T17:18:40.534Z" }, - { url = "https://files.pythonhosted.org/packages/1a/e6/1b3566e103eca6da5be4ae6713e112a053725c584e96574caf117568ffef/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cb19177205d93b881f3f89e6081593676043a6828f59c78c17a0fd6c1fbed2ba", size = 1782635, upload-time = "2026-03-28T17:18:43.073Z" }, - { url = "https://files.pythonhosted.org/packages/37/58/1b11c71904b8d079eb0c39fe664180dd1e14bebe5608e235d8bfbadc8929/aiohttp-3.13.4-cp314-cp314t-win32.whl", hash = "sha256:c606aa5656dab6552e52ca368e43869c916338346bfaf6304e15c58fb113ea30", size = 472537, upload-time = "2026-03-28T17:18:46.286Z" }, - { url = "https://files.pythonhosted.org/packages/bc/8f/87c56a1a1977d7dddea5b31e12189665a140fdb48a71e9038ff90bb564ec/aiohttp-3.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:014dcc10ec8ab8db681f0d68e939d1e9286a5aa2b993cbbdb0db130853e02144", size = 506381, upload-time = "2026-03-28T17:18:48.74Z" }, + { url = "https://files.pythonhosted.org/packages/be/6f/353954c29e7dcce7cf00280a02c75f30e133c00793c7a2ed3776d7b2f426/aiohttp-3.13.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:023ecba036ddd840b0b19bf195bfae970083fd7024ce1ac22e9bba90464620e9", size = 748876, upload-time = "2026-03-31T21:57:36.319Z" }, + { url = "https://files.pythonhosted.org/packages/f5/1b/428a7c64687b3b2e9cd293186695affc0e1e54a445d0361743b231f11066/aiohttp-3.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15c933ad7920b7d9a20de151efcd05a6e38302cbf0e10c9b2acb9a42210a2416", size = 499557, upload-time = "2026-03-31T21:57:38.236Z" }, + { url = "https://files.pythonhosted.org/packages/29/47/7be41556bfbb6917069d6a6634bb7dd5e163ba445b783a90d40f5ac7e3a7/aiohttp-3.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab2899f9fa2f9f741896ebb6fa07c4c883bfa5c7f2ddd8cf2aafa86fa981b2d2", size = 500258, upload-time = "2026-03-31T21:57:39.923Z" }, + { url = "https://files.pythonhosted.org/packages/67/84/c9ecc5828cb0b3695856c07c0a6817a99d51e2473400f705275a2b3d9239/aiohttp-3.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60eaa2d440cd4707696b52e40ed3e2b0f73f65be07fd0ef23b6b539c9c0b0b4", size = 1749199, upload-time = "2026-03-31T21:57:41.938Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d3/3c6d610e66b495657622edb6ae7c7fd31b2e9086b4ec50b47897ad6042a9/aiohttp-3.13.5-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:55b3bdd3292283295774ab585160c4004f4f2f203946997f49aac032c84649e9", size = 1721013, upload-time = "2026-03-31T21:57:43.904Z" }, + { url = "https://files.pythonhosted.org/packages/49/a0/24409c12217456df0bae7babe3b014e460b0b38a8e60753d6cb339f6556d/aiohttp-3.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2b2355dc094e5f7d45a7bb262fe7207aa0460b37a0d87027dcf21b5d890e7d5", size = 1781501, upload-time = "2026-03-31T21:57:46.285Z" }, + { url = "https://files.pythonhosted.org/packages/98/9d/b65ec649adc5bccc008b0957a9a9c691070aeac4e41cea18559fef49958b/aiohttp-3.13.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b38765950832f7d728297689ad78f5f2cf79ff82487131c4d26fe6ceecdc5f8e", size = 1878981, 
upload-time = "2026-03-31T21:57:48.734Z" }, + { url = "https://files.pythonhosted.org/packages/57/d8/8d44036d7eb7b6a8ec4c5494ea0c8c8b94fbc0ed3991c1a7adf230df03bf/aiohttp-3.13.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b18f31b80d5a33661e08c89e202edabf1986e9b49c42b4504371daeaa11b47c1", size = 1767934, upload-time = "2026-03-31T21:57:51.171Z" }, + { url = "https://files.pythonhosted.org/packages/31/04/d3f8211f273356f158e3464e9e45484d3fb8c4ce5eb2f6fe9405c3273983/aiohttp-3.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:33add2463dde55c4f2d9635c6ab33ce154e5ecf322bd26d09af95c5f81cfa286", size = 1566671, upload-time = "2026-03-31T21:57:53.326Z" }, + { url = "https://files.pythonhosted.org/packages/41/db/073e4ebe00b78e2dfcacff734291651729a62953b48933d765dc513bf798/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:327cc432fdf1356fb4fbc6fe833ad4e9f6aacb71a8acaa5f1855e4b25910e4a9", size = 1705219, upload-time = "2026-03-31T21:57:55.385Z" }, + { url = "https://files.pythonhosted.org/packages/48/45/7dfba71a2f9fd97b15c95c06819de7eb38113d2cdb6319669195a7d64270/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7c35b0bf0b48a70b4cb4fc5d7bed9b932532728e124874355de1a0af8ec4bc88", size = 1743049, upload-time = "2026-03-31T21:57:57.341Z" }, + { url = "https://files.pythonhosted.org/packages/18/71/901db0061e0f717d226386a7f471bb59b19566f2cae5f0d93874b017271f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:df23d57718f24badef8656c49743e11a89fd6f5358fa8a7b96e728fda2abf7d3", size = 1749557, upload-time = "2026-03-31T21:57:59.626Z" }, + { url = "https://files.pythonhosted.org/packages/08/d5/41eebd16066e59cd43728fe74bce953d7402f2b4ddfdfef2c0e9f17ca274/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:02e048037a6501a5ec1f6fc9736135aec6eb8a004ce48838cb951c515f32c80b", size = 1558931, upload-time = "2026-03-31T21:58:01.972Z" }, + { url = "https://files.pythonhosted.org/packages/30/e6/4a799798bf05740e66c3a1161079bda7a3dd8e22ca392481d7a7f9af82a6/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31cebae8b26f8a615d2b546fee45d5ffb76852ae6450e2a03f42c9102260d6fe", size = 1774125, upload-time = "2026-03-31T21:58:04.007Z" }, + { url = "https://files.pythonhosted.org/packages/84/63/7749337c90f92bc2cb18f9560d67aa6258c7060d1397d21529b8004fcf6f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:888e78eb5ca55a615d285c3c09a7a91b42e9dd6fc699b166ebd5dee87c9ccf14", size = 1732427, upload-time = "2026-03-31T21:58:06.337Z" }, + { url = "https://files.pythonhosted.org/packages/98/de/cf2f44ff98d307e72fb97d5f5bbae3bfcb442f0ea9790c0bf5c5c2331404/aiohttp-3.13.5-cp312-cp312-win32.whl", hash = "sha256:8bd3ec6376e68a41f9f95f5ed170e2fcf22d4eb27a1f8cb361d0508f6e0557f3", size = 433534, upload-time = "2026-03-31T21:58:08.712Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ca/eadf6f9c8fa5e31d40993e3db153fb5ed0b11008ad5d9de98a95045bed84/aiohttp-3.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:110e448e02c729bcebb18c60b9214a87ba33bac4a9fa5e9a5f139938b56c6cb1", size = 460446, upload-time = "2026-03-31T21:58:10.945Z" }, + { url = "https://files.pythonhosted.org/packages/78/e9/d76bf503005709e390122d34e15256b88f7008e246c4bdbe915cd4f1adce/aiohttp-3.13.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5029cc80718bbd545123cd8fe5d15025eccaaaace5d0eeec6bd556ad6163d61", size = 742930, upload-time = "2026-03-31T21:58:13.155Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/00/4b7b70223deaebd9bb85984d01a764b0d7bd6526fcdc73cca83bcbe7243e/aiohttp-3.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4bb6bf5811620003614076bdc807ef3b5e38244f9d25ca5fe888eaccea2a9832", size = 496927, upload-time = "2026-03-31T21:58:15.073Z" }, + { url = "https://files.pythonhosted.org/packages/9c/f5/0fb20fb49f8efdcdce6cd8127604ad2c503e754a8f139f5e02b01626523f/aiohttp-3.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a84792f8631bf5a94e52d9cc881c0b824ab42717165a5579c760b830d9392ac9", size = 497141, upload-time = "2026-03-31T21:58:17.009Z" }, + { url = "https://files.pythonhosted.org/packages/3b/86/b7c870053e36a94e8951b803cb5b909bfbc9b90ca941527f5fcafbf6b0fa/aiohttp-3.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57653eac22c6a4c13eb22ecf4d673d64a12f266e72785ab1c8b8e5940d0e8090", size = 1732476, upload-time = "2026-03-31T21:58:18.925Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e5/4e161f84f98d80c03a238671b4136e6530453d65262867d989bbe78244d0/aiohttp-3.13.5-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5e5f7debc7a57af53fdf5c5009f9391d9f4c12867049d509bf7bb164a6e295b", size = 1706507, upload-time = "2026-03-31T21:58:21.094Z" }, + { url = "https://files.pythonhosted.org/packages/d4/56/ea11a9f01518bd5a2a2fcee869d248c4b8a0cfa0bb13401574fa31adf4d4/aiohttp-3.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c719f65bebcdf6716f10e9eff80d27567f7892d8988c06de12bbbd39307c6e3a", size = 1773465, upload-time = "2026-03-31T21:58:23.159Z" }, + { url = "https://files.pythonhosted.org/packages/eb/40/333ca27fb74b0383f17c90570c748f7582501507307350a79d9f9f3c6eb1/aiohttp-3.13.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d97f93fdae594d886c5a866636397e2bcab146fd7a132fd6bb9ce182224452f8", size = 1873523, upload-time = "2026-03-31T21:58:25.59Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d2/e2f77eef1acb7111405433c707dc735e63f67a56e176e72e9e7a2cd3f493/aiohttp-3.13.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3df334e39d4c2f899a914f1dba283c1aadc311790733f705182998c6f7cae665", size = 1754113, upload-time = "2026-03-31T21:58:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/fb/56/3f653d7f53c89669301ec9e42c95233e2a0c0a6dd051269e6e678db4fdb0/aiohttp-3.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe6970addfea9e5e081401bcbadf865d2b6da045472f58af08427e108d618540", size = 1562351, upload-time = "2026-03-31T21:58:29.918Z" }, + { url = "https://files.pythonhosted.org/packages/ec/a6/9b3e91eb8ae791cce4ee736da02211c85c6f835f1bdfac0594a8a3b7018c/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7becdf835feff2f4f335d7477f121af787e3504b48b449ff737afb35869ba7bb", size = 1693205, upload-time = "2026-03-31T21:58:32.214Z" }, + { url = "https://files.pythonhosted.org/packages/98/fc/bfb437a99a2fcebd6b6eaec609571954de2ed424f01c352f4b5504371dd3/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:676e5651705ad5d8a70aeb8eb6936c436d8ebbd56e63436cb7dd9bb36d2a9a46", size = 1730618, upload-time = "2026-03-31T21:58:34.728Z" }, + { url = "https://files.pythonhosted.org/packages/e4/b6/c8534862126191a034f68153194c389addc285a0f1347d85096d349bbc15/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:9b16c653d38eb1a611cc898c41e76859ca27f119d25b53c12875fd0474ae31a8", size = 1745185, upload-time = "2026-03-31T21:58:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/0b/93/4ca8ee2ef5236e2707e0fd5fecb10ce214aee1ff4ab307af9c558bda3b37/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:999802d5fa0389f58decd24b537c54aa63c01c3219ce17d1214cbda3c2b22d2d", size = 1557311, upload-time = "2026-03-31T21:58:39.38Z" }, + { url = "https://files.pythonhosted.org/packages/57/ae/76177b15f18c5f5d094f19901d284025db28eccc5ae374d1d254181d33f4/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ec707059ee75732b1ba130ed5f9580fe10ff75180c812bc267ded039db5128c6", size = 1773147, upload-time = "2026-03-31T21:58:41.476Z" }, + { url = "https://files.pythonhosted.org/packages/01/a4/62f05a0a98d88af59d93b7fcac564e5f18f513cb7471696ac286db970d6a/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2d6d44a5b48132053c2f6cd5c8cb14bc67e99a63594e336b0f2af81e94d5530c", size = 1730356, upload-time = "2026-03-31T21:58:44.049Z" }, + { url = "https://files.pythonhosted.org/packages/e4/85/fc8601f59dfa8c9523808281f2da571f8b4699685f9809a228adcc90838d/aiohttp-3.13.5-cp313-cp313-win32.whl", hash = "sha256:329f292ed14d38a6c4c435e465f48bebb47479fd676a0411936cc371643225cc", size = 432637, upload-time = "2026-03-31T21:58:46.167Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1b/ac685a8882896acf0f6b31d689e3792199cfe7aba37969fa91da63a7fa27/aiohttp-3.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:69f571de7500e0557801c0b51f4780482c0ec5fe2ac851af5a92cfce1af1cb83", size = 458896, upload-time = "2026-03-31T21:58:48.119Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ce/46572759afc859e867a5bc8ec3487315869013f59281ce61764f76d879de/aiohttp-3.13.5-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:eb4639f32fd4a9904ab8fb45bf3383ba71137f3d9d4ba25b3b3f3109977c5b8c", size = 745721, upload-time = "2026-03-31T21:58:50.229Z" }, + { url = "https://files.pythonhosted.org/packages/13/fe/8a2efd7626dbe6049b2ef8ace18ffda8a4dfcbe1bcff3ac30c0c7575c20b/aiohttp-3.13.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:7e5dc4311bd5ac493886c63cbf76ab579dbe4641268e7c74e48e774c74b6f2be", size = 497663, upload-time = "2026-03-31T21:58:52.232Z" }, + { url = "https://files.pythonhosted.org/packages/9b/91/cc8cc78a111826c54743d88651e1687008133c37e5ee615fee9b57990fac/aiohttp-3.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:756c3c304d394977519824449600adaf2be0ccee76d206ee339c5e76b70ded25", size = 499094, upload-time = "2026-03-31T21:58:54.566Z" }, + { url = "https://files.pythonhosted.org/packages/0a/33/a8362cb15cf16a3af7e86ed11962d5cd7d59b449202dc576cdc731310bde/aiohttp-3.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecc26751323224cf8186efcf7fbcbc30f4e1d8c7970659daf25ad995e4032a56", size = 1726701, upload-time = "2026-03-31T21:58:56.864Z" }, + { url = "https://files.pythonhosted.org/packages/45/0c/c091ac5c3a17114bd76cbf85d674650969ddf93387876cf67f754204bd77/aiohttp-3.13.5-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10a75acfcf794edf9d8db50e5a7ec5fc818b2a8d3f591ce93bc7b1210df016d2", size = 1683360, upload-time = "2026-03-31T21:58:59.072Z" }, + { url = "https://files.pythonhosted.org/packages/23/73/bcee1c2b79bc275e964d1446c55c54441a461938e70267c86afaae6fba27/aiohttp-3.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", 
hash = "sha256:0f7a18f258d124cd678c5fe072fe4432a4d5232b0657fca7c1847f599233c83a", size = 1773023, upload-time = "2026-03-31T21:59:01.776Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ef/720e639df03004fee2d869f771799d8c23046dec47d5b81e396c7cda583a/aiohttp-3.13.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:df6104c009713d3a89621096f3e3e88cc323fd269dbd7c20afe18535094320be", size = 1853795, upload-time = "2026-03-31T21:59:04.568Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c9/989f4034fb46841208de7aeeac2c6d8300745ab4f28c42f629ba77c2d916/aiohttp-3.13.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:241a94f7de7c0c3b616627aaad530fe2cb620084a8b144d3be7b6ecfe95bae3b", size = 1730405, upload-time = "2026-03-31T21:59:07.221Z" }, + { url = "https://files.pythonhosted.org/packages/ce/75/ee1fd286ca7dc599d824b5651dad7b3be7ff8d9a7e7b3fe9820d9180f7db/aiohttp-3.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c974fb66180e58709b6fc402846f13791240d180b74de81d23913abe48e96d94", size = 1558082, upload-time = "2026-03-31T21:59:09.484Z" }, + { url = "https://files.pythonhosted.org/packages/c3/20/1e9e6650dfc436340116b7aa89ff8cb2bbdf0abc11dfaceaad8f74273a10/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6e27ea05d184afac78aabbac667450c75e54e35f62238d44463131bd3f96753d", size = 1692346, upload-time = "2026-03-31T21:59:12.068Z" }, + { url = "https://files.pythonhosted.org/packages/d8/40/8ebc6658d48ea630ac7903912fe0dd4e262f0e16825aa4c833c56c9f1f56/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a79a6d399cef33a11b6f004c67bb07741d91f2be01b8d712d52c75711b1e07c7", size = 1698891, upload-time = "2026-03-31T21:59:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/d8/78/ea0ae5ec8ba7a5c10bdd6e318f1ba5e76fcde17db8275188772afc7917a4/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c632ce9c0b534fbe25b52c974515ed674937c5b99f549a92127c85f771a78772", size = 1742113, upload-time = "2026-03-31T21:59:17.068Z" }, + { url = "https://files.pythonhosted.org/packages/8a/66/9d308ed71e3f2491be1acb8769d96c6f0c47d92099f3bc9119cada27b357/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:fceedde51fbd67ee2bcc8c0b33d0126cc8b51ef3bbde2f86662bd6d5a6f10ec5", size = 1553088, upload-time = "2026-03-31T21:59:19.541Z" }, + { url = "https://files.pythonhosted.org/packages/da/a6/6cc25ed8dfc6e00c90f5c6d126a98e2cf28957ad06fa1036bd34b6f24a2c/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f92995dfec9420bb69ae629abf422e516923ba79ba4403bc750d94fb4a6c68c1", size = 1757976, upload-time = "2026-03-31T21:59:22.311Z" }, + { url = "https://files.pythonhosted.org/packages/c1/2b/cce5b0ffe0de99c83e5e36d8f828e4161e415660a9f3e58339d07cce3006/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20ae0ff08b1f2c8788d6fb85afcb798654ae6ba0b747575f8562de738078457b", size = 1712444, upload-time = "2026-03-31T21:59:24.635Z" }, + { url = "https://files.pythonhosted.org/packages/6c/cf/9e1795b4160c58d29421eafd1a69c6ce351e2f7c8d3c6b7e4ca44aea1a5b/aiohttp-3.13.5-cp314-cp314-win32.whl", hash = "sha256:b20df693de16f42b2472a9c485e1c948ee55524786a0a34345511afdd22246f3", size = 438128, upload-time = "2026-03-31T21:59:27.291Z" }, + { url = "https://files.pythonhosted.org/packages/22/4d/eaedff67fc805aeba4ba746aec891b4b24cebb1a7d078084b6300f79d063/aiohttp-3.13.5-cp314-cp314-win_amd64.whl", hash = 
"sha256:f85c6f327bf0b8c29da7d93b1cabb6363fb5e4e160a32fa241ed2dce21b73162", size = 464029, upload-time = "2026-03-31T21:59:29.429Z" }, + { url = "https://files.pythonhosted.org/packages/79/11/c27d9332ee20d68dd164dc12a6ecdef2e2e35ecc97ed6cf0d2442844624b/aiohttp-3.13.5-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:1efb06900858bb618ff5cee184ae2de5828896c448403d51fb633f09e109be0a", size = 778758, upload-time = "2026-03-31T21:59:31.547Z" }, + { url = "https://files.pythonhosted.org/packages/04/fb/377aead2e0a3ba5f09b7624f702a964bdf4f08b5b6728a9799830c80041e/aiohttp-3.13.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:fee86b7c4bd29bdaf0d53d14739b08a106fdda809ca5fe032a15f52fae5fe254", size = 512883, upload-time = "2026-03-31T21:59:34.098Z" }, + { url = "https://files.pythonhosted.org/packages/bb/a6/aa109a33671f7a5d3bd78b46da9d852797c5e665bfda7d6b373f56bff2ec/aiohttp-3.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:20058e23909b9e65f9da62b396b77dfa95965cbe840f8def6e572538b1d32e36", size = 516668, upload-time = "2026-03-31T21:59:36.497Z" }, + { url = "https://files.pythonhosted.org/packages/79/b3/ca078f9f2fa9563c36fb8ef89053ea2bb146d6f792c5104574d49d8acb63/aiohttp-3.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cf20a8d6868cb15a73cab329ffc07291ba8c22b1b88176026106ae39aa6df0f", size = 1883461, upload-time = "2026-03-31T21:59:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e3/a7ad633ca1ca497b852233a3cce6906a56c3225fb6d9217b5e5e60b7419d/aiohttp-3.13.5-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:330f5da04c987f1d5bdb8ae189137c77139f36bd1cb23779ca1a354a4b027800", size = 1747661, upload-time = "2026-03-31T21:59:41.187Z" }, + { url = "https://files.pythonhosted.org/packages/33/b9/cd6fe579bed34a906d3d783fe60f2fa297ef55b27bb4538438ee49d4dc41/aiohttp-3.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f1cbf0c7926d315c3c26c2da41fd2b5d2fe01ac0e157b78caefc51a782196cf", size = 1863800, upload-time = "2026-03-31T21:59:43.84Z" }, + { url = "https://files.pythonhosted.org/packages/c0/3f/2c1e2f5144cefa889c8afd5cf431994c32f3b29da9961698ff4e3811b79a/aiohttp-3.13.5-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:53fc049ed6390d05423ba33103ded7281fe897cf97878f369a527070bd95795b", size = 1958382, upload-time = "2026-03-31T21:59:46.187Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/f31ec3f1013723b3babe3609e7f119c2c2fb6ef33da90061a705ef3e1bc8/aiohttp-3.13.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:898703aa2667e3c5ca4c54ca36cd73f58b7a38ef87a5606414799ebce4d3fd3a", size = 1803724, upload-time = "2026-03-31T21:59:48.656Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b4/57712dfc6f1542f067daa81eb61da282fab3e6f1966fca25db06c4fc62d5/aiohttp-3.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0494a01ca9584eea1e5fbd6d748e61ecff218c51b576ee1999c23db7066417d8", size = 1640027, upload-time = "2026-03-31T21:59:51.284Z" }, + { url = "https://files.pythonhosted.org/packages/25/3c/734c878fb43ec083d8e31bf029daae1beafeae582d1b35da234739e82ee7/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6cf81fe010b8c17b09495cbd15c1d35afbc8fb405c0c9cf4738e5ae3af1d65be", size = 1806644, upload-time = "2026-03-31T21:59:53.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/a5/f671e5cbec1c21d044ff3078223f949748f3a7f86b14e34a365d74a5d21f/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:c564dd5f09ddc9d8f2c2d0a301cd30a79a2cc1b46dd1a73bef8f0038863d016b", size = 1791630, upload-time = "2026-03-31T21:59:56.239Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/fb8d0ad63a0b8a99be97deac8c04dacf0785721c158bdf23d679a87aa99e/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:2994be9f6e51046c4f864598fd9abeb4fba6e88f0b2152422c9666dcd4aea9c6", size = 1809403, upload-time = "2026-03-31T21:59:59.103Z" }, + { url = "https://files.pythonhosted.org/packages/59/0c/bfed7f30662fcf12206481c2aac57dedee43fe1c49275e85b3a1e1742294/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:157826e2fa245d2ef46c83ea8a5faf77ca19355d278d425c29fda0beb3318037", size = 1634924, upload-time = "2026-03-31T22:00:02.116Z" }, + { url = "https://files.pythonhosted.org/packages/17/d6/fd518d668a09fd5a3319ae5e984d4d80b9a4b3df4e21c52f02251ef5a32e/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:a8aca50daa9493e9e13c0f566201a9006f080e7c50e5e90d0b06f53146a54500", size = 1836119, upload-time = "2026-03-31T22:00:04.756Z" }, + { url = "https://files.pythonhosted.org/packages/78/b7/15fb7a9d52e112a25b621c67b69c167805cb1f2ab8f1708a5c490d1b52fe/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3b13560160d07e047a93f23aaa30718606493036253d5430887514715b67c9d9", size = 1772072, upload-time = "2026-03-31T22:00:07.494Z" }, + { url = "https://files.pythonhosted.org/packages/7e/df/57ba7f0c4a553fc2bd8b6321df236870ec6fd64a2a473a8a13d4f733214e/aiohttp-3.13.5-cp314-cp314t-win32.whl", hash = "sha256:9a0f4474b6ea6818b41f82172d799e4b3d29e22c2c520ce4357856fced9af2f8", size = 471819, upload-time = "2026-03-31T22:00:10.277Z" }, + { url = "https://files.pythonhosted.org/packages/62/29/2f8418269e46454a26171bfdd6a055d74febf32234e474930f2f60a17145/aiohttp-3.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:18a2f6c1182c51baa1d28d68fea51513cb2a76612f038853c0ad3c145423d3d9", size = 505441, upload-time = "2026-03-31T22:00:12.791Z" }, ] [[package]] @@ -158,6 +158,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/00/b7/e3bf5133d697a08128598c8d0abc5e16377b51465a33756de24fa7dee953/aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb", size = 17405, upload-time = "2025-12-23T19:25:42.139Z" }, ] +[[package]] +name = "alembic" +version = "1.18.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mako" }, + { name = "sqlalchemy" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/13/8b084e0f2efb0275a1d534838844926f798bd766566b1375174e2448cd31/alembic-1.18.4.tar.gz", hash = "sha256:cb6e1fd84b6174ab8dbb2329f86d631ba9559dd78df550b57804d607672cedbc", size = 2056725, upload-time = "2026-02-10T16:00:47.195Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/29/6533c317b74f707ea28f8d633734dbda2119bbadfc61b2f3640ba835d0f7/alembic-1.18.4-py3-none-any.whl", hash = "sha256:a5ed4adcf6d8a4cb575f3d759f071b03cd6e5c7618eb796cb52497be25bfe19a", size = 263893, upload-time = "2026-02-10T16:00:49.997Z" }, +] + [[package]] name = "annotated-doc" version = "0.0.4" @@ -178,7 +192,7 @@ wheels = [ [[package]] name = "anthropic" -version = "0.84.0" +version = "0.97.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, 
@@ -190,31 +204,71 @@ dependencies = [
     { name = "sniffio" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/04/ea/0869d6df9ef83dcf393aeefc12dd81677d091c6ffc86f783e51cf44062f2/anthropic-0.84.0.tar.gz", hash = "sha256:72f5f90e5aebe62dca316cb013629cfa24996b0f5a4593b8c3d712bc03c43c37", size = 539457, upload-time = "2026-02-25T05:22:38.54Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/14/93/f66ea8bfe39f2e6bb9da8e27fa5457ad2520e8f7612dfc547b17fad55c4d/anthropic-0.97.0.tar.gz", hash = "sha256:021e79fd8e21e90ad94dc5ba2bbbd8b1599f424f5b1fab6c06204009cab764be", size = 669502, upload-time = "2026-04-23T20:52:34.445Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/64/ca/218fa25002a332c0aa149ba18ffc0543175998b1f65de63f6d106689a345/anthropic-0.84.0-py3-none-any.whl", hash = "sha256:861c4c50f91ca45f942e091d83b60530ad6d4f98733bfe648065364da05d29e7", size = 455156, upload-time = "2026-02-25T05:22:40.468Z" },
+    { url = "https://files.pythonhosted.org/packages/53/b6/8e851369fa661ad0fef2ae6266bf3b7d52b78ccf011720058f4adaca59e2/anthropic-0.97.0-py3-none-any.whl", hash = "sha256:8a1a472dfabcfc0c52ff6a3eecf724ac7e07107a2f6e2367be55ceb42f5d5613", size = 662126, upload-time = "2026-04-23T20:52:32.377Z" },
 ]
 
 [[package]]
 name = "anyio"
-version = "4.12.1"
+version = "4.13.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "idna" },
     { name = "typing-extensions", marker = "python_full_version < '3.13'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/19/14/2c5dd9f512b66549ae92767a9c7b330ae88e1932ca57876909410251fe13/anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc", size = 231622, upload-time = "2026-03-24T12:59:09.671Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" },
+    { url = "https://files.pythonhosted.org/packages/da/42/e921fccf5015463e32a3cf6ee7f980a6ed0f395ceeaa45060b61d86486c2/anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708", size = 114353, upload-time = "2026-03-24T12:59:08.246Z" },
+]
+
+[[package]]
+name = "asyncpg"
+version = "0.31.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fe/cc/d18065ce2380d80b1bcce927c24a2642efd38918e33fd724bc4bca904877/asyncpg-0.31.0.tar.gz", hash = "sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735", size = 993667, upload-time = "2025-11-24T23:27:00.812Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2a/a6/59d0a146e61d20e18db7396583242e32e0f120693b67a8de43f1557033e2/asyncpg-0.31.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b44c31e1efc1c15188ef183f287c728e2046abb1d26af4d20858215d50d91fad", size = 662042, upload-time = "2025-11-24T23:25:49.578Z" },
+    { url = "https://files.pythonhosted.org/packages/36/01/ffaa189dcb63a2471720615e60185c3f6327716fdc0fc04334436fbb7c65/asyncpg-0.31.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0c89ccf741c067614c9b5fc7f1fc6f3b61ab05ae4aaa966e6fd6b93097c7d20d", size = 638504, upload-time = "2025-11-24T23:25:51.501Z" },
+    { url = "https://files.pythonhosted.org/packages/9f/62/3f699ba45d8bd24c5d65392190d19656d74ff0185f42e19d0bbd973bb371/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:12b3b2e39dc5470abd5e98c8d3373e4b1d1234d9fbdedf538798b2c13c64460a", size = 3426241, upload-time = "2025-11-24T23:25:53.278Z" },
+    { url = "https://files.pythonhosted.org/packages/8c/d1/a867c2150f9c6e7af6462637f613ba67f78a314b00db220cd26ff559d532/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:aad7a33913fb8bcb5454313377cc330fbb19a0cd5faa7272407d8a0c4257b671", size = 3520321, upload-time = "2025-11-24T23:25:54.982Z" },
+    { url = "https://files.pythonhosted.org/packages/7a/1a/cce4c3f246805ecd285a3591222a2611141f1669d002163abef999b60f98/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3df118d94f46d85b2e434fd62c84cb66d5834d5a890725fe625f498e72e4d5ec", size = 3316685, upload-time = "2025-11-24T23:25:57.43Z" },
+    { url = "https://files.pythonhosted.org/packages/40/ae/0fc961179e78cc579e138fad6eb580448ecae64908f95b8cb8ee2f241f67/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5b6efff3c17c3202d4b37189969acf8927438a238c6257f66be3c426beba20", size = 3471858, upload-time = "2025-11-24T23:25:59.636Z" },
+    { url = "https://files.pythonhosted.org/packages/52/b2/b20e09670be031afa4cbfabd645caece7f85ec62d69c312239de568e058e/asyncpg-0.31.0-cp312-cp312-win32.whl", hash = "sha256:027eaa61361ec735926566f995d959ade4796f6a49d3bde17e5134b9964f9ba8", size = 527852, upload-time = "2025-11-24T23:26:01.084Z" },
+    { url = "https://files.pythonhosted.org/packages/b5/f0/f2ed1de154e15b107dc692262395b3c17fc34eafe2a78fc2115931561730/asyncpg-0.31.0-cp312-cp312-win_amd64.whl", hash = "sha256:72d6bdcbc93d608a1158f17932de2321f68b1a967a13e014998db87a72ed3186", size = 597175, upload-time = "2025-11-24T23:26:02.564Z" },
+    { url = "https://files.pythonhosted.org/packages/95/11/97b5c2af72a5d0b9bc3fa30cd4b9ce22284a9a943a150fdc768763caf035/asyncpg-0.31.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c204fab1b91e08b0f47e90a75d1b3c62174dab21f670ad6c5d0f243a228f015b", size = 661111, upload-time = "2025-11-24T23:26:04.467Z" },
+    { url = "https://files.pythonhosted.org/packages/1b/71/157d611c791a5e2d0423f09f027bd499935f0906e0c2a416ce712ba51ef3/asyncpg-0.31.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:54a64f91839ba59008eccf7aad2e93d6e3de688d796f35803235ea1c4898ae1e", size = 636928, upload-time = "2025-11-24T23:26:05.944Z" },
+    { url = "https://files.pythonhosted.org/packages/2e/fc/9e3486fb2bbe69d4a867c0b76d68542650a7ff1574ca40e84c3111bb0c6e/asyncpg-0.31.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0e0822b1038dc7253b337b0f3f676cadc4ac31b126c5d42691c39691962e403", size = 3424067, upload-time = "2025-11-24T23:26:07.957Z" },
+    { url = "https://files.pythonhosted.org/packages/12/c6/8c9d076f73f07f995013c791e018a1cd5f31823c2a3187fc8581706aa00f/asyncpg-0.31.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bef056aa502ee34204c161c72ca1f3c274917596877f825968368b2c33f585f4", size = 3518156, upload-time = "2025-11-24T23:26:09.591Z" },
+    { url = "https://files.pythonhosted.org/packages/ae/3b/60683a0baf50fbc546499cfb53132cb6835b92b529a05f6a81471ab60d0c/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0bfbcc5b7ffcd9b75ab1558f00db2ae07db9c80637ad1b2469c43df79d7a5ae2", size = 3319636, upload-time = "2025-11-24T23:26:11.168Z" },
+    { url = "https://files.pythonhosted.org/packages/50/dc/8487df0f69bd398a61e1792b3cba0e47477f214eff085ba0efa7eac9ce87/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:22bc525ebbdc24d1261ecbf6f504998244d4e3be1721784b5f64664d61fbe602", size = 3472079, upload-time = "2025-11-24T23:26:13.164Z" },
+    { url = "https://files.pythonhosted.org/packages/13/a1/c5bbeeb8531c05c89135cb8b28575ac2fac618bcb60119ee9696c3faf71c/asyncpg-0.31.0-cp313-cp313-win32.whl", hash = "sha256:f890de5e1e4f7e14023619399a471ce4b71f5418cd67a51853b9910fdfa73696", size = 527606, upload-time = "2025-11-24T23:26:14.78Z" },
+    { url = "https://files.pythonhosted.org/packages/91/66/b25ccb84a246b470eb943b0107c07edcae51804912b824054b3413995a10/asyncpg-0.31.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc5f2fa9916f292e5c5c8b2ac2813763bcd7f58e130055b4ad8a0531314201ab", size = 596569, upload-time = "2025-11-24T23:26:16.189Z" },
+    { url = "https://files.pythonhosted.org/packages/3c/36/e9450d62e84a13aea6580c83a47a437f26c7ca6fa0f0fd40b6670793ea30/asyncpg-0.31.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f6b56b91bb0ffc328c4e3ed113136cddd9deefdf5f79ab448598b9772831df44", size = 660867, upload-time = "2025-11-24T23:26:17.631Z" },
+    { url = "https://files.pythonhosted.org/packages/82/4b/1d0a2b33b3102d210439338e1beea616a6122267c0df459ff0265cd5807a/asyncpg-0.31.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:334dec28cf20d7f5bb9e45b39546ddf247f8042a690bff9b9573d00086e69cb5", size = 638349, upload-time = "2025-11-24T23:26:19.689Z" },
+    { url = "https://files.pythonhosted.org/packages/41/aa/e7f7ac9a7974f08eff9183e392b2d62516f90412686532d27e196c0f0eeb/asyncpg-0.31.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98cc158c53f46de7bb677fd20c417e264fc02b36d901cc2a43bd6cb0dc6dbfd2", size = 3410428, upload-time = "2025-11-24T23:26:21.275Z" },
+    { url = "https://files.pythonhosted.org/packages/6f/de/bf1b60de3dede5c2731e6788617a512bc0ebd9693eac297ee74086f101d7/asyncpg-0.31.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9322b563e2661a52e3cdbc93eed3be7748b289f792e0011cb2720d278b366ce2", size = 3471678, upload-time = "2025-11-24T23:26:23.627Z" },
+    { url = "https://files.pythonhosted.org/packages/46/78/fc3ade003e22d8bd53aaf8f75f4be48f0b460fa73738f0391b9c856a9147/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19857a358fc811d82227449b7ca40afb46e75b33eb8897240c3839dd8b744218", size = 3313505, upload-time = "2025-11-24T23:26:25.235Z" },
+    { url = "https://files.pythonhosted.org/packages/bf/e9/73eb8a6789e927816f4705291be21f2225687bfa97321e40cd23055e903a/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ba5f8886e850882ff2c2ace5732300e99193823e8107e2c53ef01c1ebfa1e85d", size = 3434744, upload-time = "2025-11-24T23:26:26.944Z" },
+    { url = "https://files.pythonhosted.org/packages/08/4b/f10b880534413c65c5b5862f79b8e81553a8f364e5238832ad4c0af71b7f/asyncpg-0.31.0-cp314-cp314-win32.whl", hash = "sha256:cea3a0b2a14f95834cee29432e4ddc399b95700eb1d51bbc5bfee8f31fa07b2b", size = 532251, upload-time = "2025-11-24T23:26:28.404Z" },
+    { url = "https://files.pythonhosted.org/packages/d3/2d/7aa40750b7a19efa5d66e67fc06008ca0f27ba1bd082e457ad82f59aba49/asyncpg-0.31.0-cp314-cp314-win_amd64.whl", hash = "sha256:04d19392716af6b029411a0264d92093b6e5e8285ae97a39957b9a9c14ea72be", size = 604901, upload-time = "2025-11-24T23:26:30.34Z" },
+    { url = "https://files.pythonhosted.org/packages/ce/fe/b9dfe349b83b9dee28cc42360d2c86b2cdce4cb551a2c2d27e156bcac84d/asyncpg-0.31.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bdb957706da132e982cc6856bb2f7b740603472b54c3ebc77fe60ea3e57e1bd2", size = 702280, upload-time = "2025-11-24T23:26:32Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/81/e6be6e37e560bd91e6c23ea8a6138a04fd057b08cf63d3c5055c98e81c1d/asyncpg-0.31.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6d11b198111a72f47154fa03b85799f9be63701e068b43f84ac25da0bda9cb31", size = 682931, upload-time = "2025-11-24T23:26:33.572Z" },
+    { url = "https://files.pythonhosted.org/packages/a6/45/6009040da85a1648dd5bc75b3b0a062081c483e75a1a29041ae63a0bf0dc/asyncpg-0.31.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18c83b03bc0d1b23e6230f5bf8d4f217dc9bc08644ce0502a9d91dc9e634a9c7", size = 3581608, upload-time = "2025-11-24T23:26:35.638Z" },
+    { url = "https://files.pythonhosted.org/packages/7e/06/2e3d4d7608b0b2b3adbee0d0bd6a2d29ca0fc4d8a78f8277df04e2d1fd7b/asyncpg-0.31.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e009abc333464ff18b8f6fd146addffd9aaf63e79aa3bb40ab7a4c332d0c5e9e", size = 3498738, upload-time = "2025-11-24T23:26:37.275Z" },
+    { url = "https://files.pythonhosted.org/packages/7d/aa/7d75ede780033141c51d83577ea23236ba7d3a23593929b32b49db8ed36e/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3b1fbcb0e396a5ca435a8826a87e5c2c2cc0c8c68eb6fadf82168056b0e53a8c", size = 3401026, upload-time = "2025-11-24T23:26:39.423Z" },
+    { url = "https://files.pythonhosted.org/packages/ba/7a/15e37d45e7f7c94facc1e9148c0e455e8f33c08f0b8a0b1deb2c5171771b/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8df714dba348efcc162d2adf02d213e5fab1bd9f557e1305633e851a61814a7a", size = 3429426, upload-time = "2025-11-24T23:26:41.032Z" },
+    { url = "https://files.pythonhosted.org/packages/13/d5/71437c5f6ae5f307828710efbe62163974e71237d5d46ebd2869ea052d10/asyncpg-0.31.0-cp314-cp314t-win32.whl", hash = "sha256:1b41f1afb1033f2b44f3234993b15096ddc9cd71b21a42dbd87fc6a57b43d65d", size = 614495, upload-time = "2025-11-24T23:26:42.659Z" },
+    { url = "https://files.pythonhosted.org/packages/3c/d7/8fb3044eaef08a310acfe23dae9a8e2e07d305edc29a53497e52bc76eca7/asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3", size = 706062, upload-time = "2025-11-24T23:26:44.086Z" },
 ]
 
 [[package]]
 name = "attrs"
-version = "25.4.0"
+version = "26.1.0"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/9a/8e/82a0fe20a541c03148528be8cac2408564a6c9a0cc7e9171802bc1d26985/attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32", size = 952055, upload-time = "2026-03-19T14:22:25.026Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" },
+    { url = "https://files.pythonhosted.org/packages/64/b4/17d4b0b2a2dc85a6df63d1157e028ed19f90d4cd97c36717afef2bc2f395/attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309", size = 67548, upload-time = "2026-03-19T14:22:23.645Z" },
 ]
 
 [[package]]
@@ -289,20 +343,20 @@ wheels = [
 
 [[package]]
 name = "azure-core"
-version = "1.38.0"
+version = "1.39.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "requests" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/dc/1b/e503e08e755ea94e7d3419c9242315f888fc664211c90d032e40479022bf/azure_core-1.38.0.tar.gz", hash = "sha256:8194d2682245a3e4e3151a667c686464c3786fed7918b394d035bdcd61bb5993", size = 363033, upload-time = "2026-01-12T17:03:05.535Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/34/83/bbde3faa84ddcb8eb0eca4b3ffb3221252281db4ce351300fe248c5c70b1/azure_core-1.39.0.tar.gz", hash = "sha256:8a90a562998dd44ce84597590fff6249701b98c0e8797c95fcdd695b54c35d74", size = 367531, upload-time = "2026-03-19T01:31:29.461Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/fc/d8/b8fcba9464f02b121f39de2db2bf57f0b216fe11d014513d666e8634380d/azure_core-1.38.0-py3-none-any.whl", hash = "sha256:ab0c9b2cd71fecb1842d52c965c95285d3cfb38902f6766e4a471f1cd8905335", size = 217825, upload-time = "2026-01-12T17:03:07.291Z" },
+    { url = "https://files.pythonhosted.org/packages/7e/d6/8ebcd05b01a580f086ac9a97fb9fac65c09a4b012161cc97c21a336e880b/azure_core-1.39.0-py3-none-any.whl", hash = "sha256:4ac7b70fab5438c3f68770649a78daf97833caa83827f91df9c14e0e0ea7d34f", size = 218318, upload-time = "2026-03-19T01:31:31.25Z" },
 ]
 
 [[package]]
 name = "azure-identity"
-version = "1.25.1"
+version = "1.25.3"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "azure-core" },
@@ -311,9 +365,9 @@ dependencies = [
     { name = "msal-extensions" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/06/8d/1a6c41c28a37eab26dc85ab6c86992c700cd3f4a597d9ed174b0e9c69489/azure_identity-1.25.1.tar.gz", hash = "sha256:87ca8328883de6036443e1c37b40e8dc8fb74898240f61071e09d2e369361456", size = 279826, upload-time = "2025-10-06T20:30:02.194Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c5/0e/3a63efb48aa4a5ae2cfca61ee152fbcb668092134d3eb8bfda472dd5c617/azure_identity-1.25.3.tar.gz", hash = "sha256:ab23c0d63015f50b630ef6c6cf395e7262f439ce06e5d07a64e874c724f8d9e6", size = 286304, upload-time = "2026-03-13T01:12:20.892Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/83/7b/5652771e24fff12da9dde4c20ecf4682e606b104f26419d139758cc935a6/azure_identity-1.25.1-py3-none-any.whl", hash = "sha256:e9edd720af03dff020223cd269fa3a61e8f345ea75443858273bcb44844ab651", size = 191317, upload-time = "2025-10-06T20:30:04.251Z" },
+    { url = "https://files.pythonhosted.org/packages/49/9a/417b3a533e01953a7c618884df2cb05a71e7b68bdbce4fbdb62349d2a2e8/azure_identity-1.25.3-py3-none-any.whl", hash = "sha256:f4d0b956a8146f30333e071374171f3cfa7bdb8073adb8c3814b65567aa7447c", size = 192138, upload-time = "2026-03-13T01:12:22.951Z" },
 ]
 
 [[package]]
@@ -325,6 +379,72 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148, upload-time = "2022-10-05T19:19:30.546Z" },
 ]
 
+[[package]]
+name = "bcrypt"
+version = "5.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz", hash = "sha256:f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", size = 25386, upload-time = "2025-09-25T19:50:47.829Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/13/85/3e65e01985fddf25b64ca67275bb5bdb4040bd1a53b66d355c6c37c8a680/bcrypt-5.0.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f3c08197f3039bec79cee59a606d62b96b16669cff3949f21e74796b6e3cd2be", size = 481806, upload-time = "2025-09-25T19:49:05.102Z" },
+    { url = "https://files.pythonhosted.org/packages/44/dc/01eb79f12b177017a726cbf78330eb0eb442fae0e7b3dfd84ea2849552f3/bcrypt-5.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:200af71bc25f22006f4069060c88ed36f8aa4ff7f53e67ff04d2ab3f1e79a5b2", size = 268626, upload-time = "2025-09-25T19:49:06.723Z" },
+    { url = "https://files.pythonhosted.org/packages/8c/cf/e82388ad5959c40d6afd94fb4743cc077129d45b952d46bdc3180310e2df/bcrypt-5.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:baade0a5657654c2984468efb7d6c110db87ea63ef5a4b54732e7e337253e44f", size = 271853, upload-time = "2025-09-25T19:49:08.028Z" },
+    { url = "https://files.pythonhosted.org/packages/ec/86/7134b9dae7cf0efa85671651341f6afa695857fae172615e960fb6a466fa/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c58b56cdfb03202b3bcc9fd8daee8e8e9b6d7e3163aa97c631dfcfcc24d36c86", size = 269793, upload-time = "2025-09-25T19:49:09.727Z" },
+    { url = "https://files.pythonhosted.org/packages/cc/82/6296688ac1b9e503d034e7d0614d56e80c5d1a08402ff856a4549cb59207/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4bfd2a34de661f34d0bda43c3e4e79df586e4716ef401fe31ea39d69d581ef23", size = 289930, upload-time = "2025-09-25T19:49:11.204Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/18/884a44aa47f2a3b88dd09bc05a1e40b57878ecd111d17e5bba6f09f8bb77/bcrypt-5.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ed2e1365e31fc73f1825fa830f1c8f8917ca1b3ca6185773b349c20fd606cec2", size = 272194, upload-time = "2025-09-25T19:49:12.524Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/8f/371a3ab33c6982070b674f1788e05b656cfbf5685894acbfef0c65483a59/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:83e787d7a84dbbfba6f250dd7a5efd689e935f03dd83b0f919d39349e1f23f83", size = 269381, upload-time = "2025-09-25T19:49:14.308Z" },
+    { url = "https://files.pythonhosted.org/packages/b1/34/7e4e6abb7a8778db6422e88b1f06eb07c47682313997ee8a8f9352e5a6f1/bcrypt-5.0.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:137c5156524328a24b9fac1cb5db0ba618bc97d11970b39184c1d87dc4bf1746", size = 271750, upload-time = "2025-09-25T19:49:15.584Z" },
+    { url = "https://files.pythonhosted.org/packages/c0/1b/54f416be2499bd72123c70d98d36c6cd61a4e33d9b89562c22481c81bb30/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:38cac74101777a6a7d3b3e3cfefa57089b5ada650dce2baf0cbdd9d65db22a9e", size = 303757, upload-time = "2025-09-25T19:49:17.244Z" },
+    { url = "https://files.pythonhosted.org/packages/13/62/062c24c7bcf9d2826a1a843d0d605c65a755bc98002923d01fd61270705a/bcrypt-5.0.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:d8d65b564ec849643d9f7ea05c6d9f0cd7ca23bdd4ac0c2dbef1104ab504543d", size = 306740, upload-time = "2025-09-25T19:49:18.693Z" },
+    { url = "https://files.pythonhosted.org/packages/d5/c8/1fdbfc8c0f20875b6b4020f3c7dc447b8de60aa0be5faaf009d24242aec9/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:741449132f64b3524e95cd30e5cd3343006ce146088f074f31ab26b94e6c75ba", size = 334197, upload-time = "2025-09-25T19:49:20.523Z" },
+    { url = "https://files.pythonhosted.org/packages/a6/c1/8b84545382d75bef226fbc6588af0f7b7d095f7cd6a670b42a86243183cd/bcrypt-5.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:212139484ab3207b1f0c00633d3be92fef3c5f0af17cad155679d03ff2ee1e41", size = 352974, upload-time = "2025-09-25T19:49:22.254Z" },
+    { url = "https://files.pythonhosted.org/packages/10/a6/ffb49d4254ed085e62e3e5dd05982b4393e32fe1e49bb1130186617c29cd/bcrypt-5.0.0-cp313-cp313t-win32.whl", hash = "sha256:9d52ed507c2488eddd6a95bccee4e808d3234fa78dd370e24bac65a21212b861", size = 148498, upload-time = "2025-09-25T19:49:24.134Z" },
+    { url = "https://files.pythonhosted.org/packages/48/a9/259559edc85258b6d5fc5471a62a3299a6aa37a6611a169756bf4689323c/bcrypt-5.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f6984a24db30548fd39a44360532898c33528b74aedf81c26cf29c51ee47057e", size = 145853, upload-time = "2025-09-25T19:49:25.702Z" },
+    { url = "https://files.pythonhosted.org/packages/2d/df/9714173403c7e8b245acf8e4be8876aac64a209d1b392af457c79e60492e/bcrypt-5.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9fffdb387abe6aa775af36ef16f55e318dcda4194ddbf82007a6f21da29de8f5", size = 139626, upload-time = "2025-09-25T19:49:26.928Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/14/c18006f91816606a4abe294ccc5d1e6f0e42304df5a33710e9e8e95416e1/bcrypt-5.0.0-cp314-cp314t-macosx_10_12_universal2.whl", hash = "sha256:4870a52610537037adb382444fefd3706d96d663ac44cbb2f37e3919dca3d7ef", size = 481862, upload-time = "2025-09-25T19:49:28.365Z" },
+    { url = "https://files.pythonhosted.org/packages/67/49/dd074d831f00e589537e07a0725cf0e220d1f0d5d8e85ad5bbff251c45aa/bcrypt-5.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48f753100931605686f74e27a7b49238122aa761a9aefe9373265b8b7aa43ea4", size = 268544, upload-time = "2025-09-25T19:49:30.39Z" },
+    { url = "https://files.pythonhosted.org/packages/f5/91/50ccba088b8c474545b034a1424d05195d9fcbaaf802ab8bfe2be5a4e0d7/bcrypt-5.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f70aadb7a809305226daedf75d90379c397b094755a710d7014b8b117df1ebbf", size = 271787, upload-time = "2025-09-25T19:49:32.144Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/e7/d7dba133e02abcda3b52087a7eea8c0d4f64d3e593b4fffc10c31b7061f3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:744d3c6b164caa658adcb72cb8cc9ad9b4b75c7db507ab4bc2480474a51989da", size = 269753, upload-time = "2025-09-25T19:49:33.885Z" },
+    { url = "https://files.pythonhosted.org/packages/33/fc/5b145673c4b8d01018307b5c2c1fc87a6f5a436f0ad56607aee389de8ee3/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a28bc05039bdf3289d757f49d616ab3efe8cf40d8e8001ccdd621cd4f98f4fc9", size = 289587, upload-time = "2025-09-25T19:49:35.144Z" },
+    { url = "https://files.pythonhosted.org/packages/27/d7/1ff22703ec6d4f90e62f1a5654b8867ef96bafb8e8102c2288333e1a6ca6/bcrypt-5.0.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:7f277a4b3390ab4bebe597800a90da0edae882c6196d3038a73adf446c4f969f", size = 272178, upload-time = "2025-09-25T19:49:36.793Z" },
+    { url = "https://files.pythonhosted.org/packages/c8/88/815b6d558a1e4d40ece04a2f84865b0fef233513bd85fd0e40c294272d62/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:79cfa161eda8d2ddf29acad370356b47f02387153b11d46042e93a0a95127493", size = 269295, upload-time = "2025-09-25T19:49:38.164Z" },
+    { url = "https://files.pythonhosted.org/packages/51/8c/e0db387c79ab4931fc89827d37608c31cc57b6edc08ccd2386139028dc0d/bcrypt-5.0.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a5393eae5722bcef046a990b84dff02b954904c36a194f6cfc817d7dca6c6f0b", size = 271700, upload-time = "2025-09-25T19:49:39.917Z" },
+    { url = "https://files.pythonhosted.org/packages/06/83/1570edddd150f572dbe9fc00f6203a89fc7d4226821f67328a85c330f239/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f4c94dec1b5ab5d522750cb059bb9409ea8872d4494fd152b53cca99f1ddd8c", size = 334034, upload-time = "2025-09-25T19:49:41.227Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/f2/ea64e51a65e56ae7a8a4ec236c2bfbdd4b23008abd50ac33fbb2d1d15424/bcrypt-5.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0cae4cb350934dfd74c020525eeae0a5f79257e8a201c0c176f4b84fdbf2a4b4", size = 352766, upload-time = "2025-09-25T19:49:43.08Z" },
+    { url = "https://files.pythonhosted.org/packages/d7/d4/1a388d21ee66876f27d1a1f41287897d0c0f1712ef97d395d708ba93004c/bcrypt-5.0.0-cp314-cp314t-win32.whl", hash = "sha256:b17366316c654e1ad0306a6858e189fc835eca39f7eb2cafd6aaca8ce0c40a2e", size = 152449, upload-time = "2025-09-25T19:49:44.971Z" },
+    { url = "https://files.pythonhosted.org/packages/3f/61/3291c2243ae0229e5bca5d19f4032cecad5dfb05a2557169d3a69dc0ba91/bcrypt-5.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:92864f54fb48b4c718fc92a32825d0e42265a627f956bc0361fe869f1adc3e7d", size = 149310, upload-time = "2025-09-25T19:49:46.162Z" },
+    { url = "https://files.pythonhosted.org/packages/3e/89/4b01c52ae0c1a681d4021e5dd3e45b111a8fb47254a274fa9a378d8d834b/bcrypt-5.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dd19cf5184a90c873009244586396a6a884d591a5323f0e8a5922560718d4993", size = 143761, upload-time = "2025-09-25T19:49:47.345Z" },
+    { url = "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", size = 494553, upload-time = "2025-09-25T19:49:49.006Z" },
+    { url = "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", size = 275009, upload-time = "2025-09-25T19:49:50.581Z" },
+    { url = "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", size = 278029, upload-time = "2025-09-25T19:49:52.533Z" },
+    { url = "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", size = 275907, upload-time = "2025-09-25T19:49:54.709Z" },
+    { url = "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", size = 296500, upload-time = "2025-09-25T19:49:56.013Z" },
+    { url = "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", size = 278412, upload-time = "2025-09-25T19:49:57.356Z" },
+    { url = "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", size = 275486, upload-time = "2025-09-25T19:49:59.116Z" },
+    { url = "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", size = 277940, upload-time = "2025-09-25T19:50:00.869Z" },
+    { url = "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", size = 310776, upload-time = "2025-09-25T19:50:02.393Z" },
+    { url = "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", size = 312922, upload-time = "2025-09-25T19:50:04.232Z" },
+    { url = "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", size = 341367, upload-time = "2025-09-25T19:50:05.559Z" },
+    { url = "https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", size = 359187, upload-time = "2025-09-25T19:50:06.916Z" },
+    { url = "https://files.pythonhosted.org/packages/1b/bb/461f352fdca663524b4643d8b09e8435b4990f17fbf4fea6bc2a90aa0cc7/bcrypt-5.0.0-cp38-abi3-win32.whl", hash = "sha256:3abeb543874b2c0524ff40c57a4e14e5d3a66ff33fb423529c88f180fd756538", size = 153752, upload-time = "2025-09-25T19:50:08.515Z" },
+    { url = "https://files.pythonhosted.org/packages/41/aa/4190e60921927b7056820291f56fc57d00d04757c8b316b2d3c0d1d6da2c/bcrypt-5.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:35a77ec55b541e5e583eb3436ffbbf53b0ffa1fa16ca6782279daf95d146dcd9", size = 150881, upload-time = "2025-09-25T19:50:09.742Z" },
+    { url = "https://files.pythonhosted.org/packages/54/12/cd77221719d0b39ac0b55dbd39358db1cd1246e0282e104366ebbfb8266a/bcrypt-5.0.0-cp38-abi3-win_arm64.whl", hash = "sha256:cde08734f12c6a4e28dc6755cd11d3bdfea608d93d958fffbe95a7026ebe4980", size = 144931, upload-time = "2025-09-25T19:50:11.016Z" },
+    { url = "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", size = 495313, upload-time = "2025-09-25T19:50:12.309Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", size = 275290, upload-time = "2025-09-25T19:50:13.673Z" },
+    { url = "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", size = 278253, upload-time = "2025-09-25T19:50:15.089Z" },
+    { url = "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", size = 276084, upload-time = "2025-09-25T19:50:16.699Z" },
+    { url = "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", size = 297185, upload-time = "2025-09-25T19:50:18.525Z" },
+    { url = "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", size = 278656, upload-time = "2025-09-25T19:50:19.809Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", size = 275662, upload-time = "2025-09-25T19:50:21.567Z" },
+    { url = "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", size = 278240, upload-time = "2025-09-25T19:50:23.305Z" },
+    { url = "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", size = 311152, upload-time = "2025-09-25T19:50:24.597Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", size = 313284, upload-time = "2025-09-25T19:50:26.268Z" },
+    { url = "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", size = 341643, upload-time = "2025-09-25T19:50:28.02Z" },
+    { url = "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", size = 359698, upload-time = "2025-09-25T19:50:31.347Z" },
+    { url = "https://files.pythonhosted.org/packages/c0/f6/688d2cd64bfd0b14d805ddb8a565e11ca1fb0fd6817175d58b10052b6d88/bcrypt-5.0.0-cp39-abi3-win32.whl", hash = "sha256:64d7ce196203e468c457c37ec22390f1a61c85c6f0b8160fd752940ccfb3a683", size = 153725, upload-time = "2025-09-25T19:50:34.384Z" },
+    { url = "https://files.pythonhosted.org/packages/9f/b9/9d9a641194a730bda138b3dfe53f584d61c58cd5230e37566e83ec2ffa0d/bcrypt-5.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:64ee8434b0da054d830fa8e89e1c8bf30061d539044a39524ff7dec90481e5c2", size = 150912, upload-time = "2025-09-25T19:50:35.69Z" },
+    { url = "https://files.pythonhosted.org/packages/27/44/d2ef5e87509158ad2187f4dd0852df80695bb1ee0cfe0a684727b01a69e0/bcrypt-5.0.0-cp39-abi3-win_arm64.whl", hash = "sha256:f2347d3534e76bf50bca5500989d6c1d05ed64b440408057a37673282c654927", size = 144953, upload-time = "2025-09-25T19:50:37.32Z" },
+]
+
 [[package]]
 name = "beautifulsoup4"
 version = "4.14.3"
@@ -350,72 +470,13 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/95/c1/84fc6811122f54b20de2e5afb312ee07a3a47a328755587d1e505475239b/blockbuster-1.5.26-py3-none-any.whl", hash = "sha256:f8e53fb2dd4b6c6ec2f04907ddbd063ca7cd1ef587d24448ef4e50e81e3a79bb", size = 13226, upload-time = "2025-12-05T10:43:48.778Z" },
 ]
 
-[[package]]
-name = "brotli"
-version = "1.2.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f7/16/c92ca344d646e71a43b8bb353f0a6490d7f6e06210f8554c8f874e454285/brotli-1.2.0.tar.gz", hash = "sha256:e310f77e41941c13340a95976fe66a8a95b01e783d430eeaf7a2f87e0a57dd0a", size = 7388632, upload-time = "2025-11-05T18:39:42.86Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/11/ee/b0a11ab2315c69bb9b45a2aaed022499c9c24a205c3a49c3513b541a7967/brotli-1.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:35d382625778834a7f3061b15423919aa03e4f5da34ac8e02c074e4b75ab4f84", size = 861543, upload-time = "2025-11-05T18:38:24.183Z" },
-    { url = "https://files.pythonhosted.org/packages/e1/2f/29c1459513cd35828e25531ebfcbf3e92a5e49f560b1777a9af7203eb46e/brotli-1.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a61c06b334bd99bc5ae84f1eeb36bfe01400264b3c352f968c6e30a10f9d08b", size = 444288, upload-time = "2025-11-05T18:38:25.139Z" },
-    { url = "https://files.pythonhosted.org/packages/3d/6f/feba03130d5fceadfa3a1bb102cb14650798c848b1df2a808356f939bb16/brotli-1.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:acec55bb7c90f1dfc476126f9711a8e81c9af7fb617409a9ee2953115343f08d", size = 1528071, upload-time = "2025-11-05T18:38:26.081Z" },
-    { url = "https://files.pythonhosted.org/packages/2b/38/f3abb554eee089bd15471057ba85f47e53a44a462cfce265d9bf7088eb09/brotli-1.2.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:260d3692396e1895c5034f204f0db022c056f9e2ac841593a4cf9426e2a3faca", size = 1626913, upload-time = "2025-11-05T18:38:27.284Z" },
-    { url = "https://files.pythonhosted.org/packages/03/a7/03aa61fbc3c5cbf99b44d158665f9b0dd3d8059be16c460208d9e385c837/brotli-1.2.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:072e7624b1fc4d601036ab3f4f27942ef772887e876beff0301d261210bca97f", size = 1419762, upload-time = "2025-11-05T18:38:28.295Z" },
-    { url = "https://files.pythonhosted.org/packages/21/1b/0374a89ee27d152a5069c356c96b93afd1b94eae83f1e004b57eb6ce2f10/brotli-1.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adedc4a67e15327dfdd04884873c6d5a01d3e3b6f61406f99b1ed4865a2f6d28", size = 1484494, upload-time = "2025-11-05T18:38:29.29Z" },
-    { url = "https://files.pythonhosted.org/packages/cf/57/69d4fe84a67aef4f524dcd075c6eee868d7850e85bf01d778a857d8dbe0a/brotli-1.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7a47ce5c2288702e09dc22a44d0ee6152f2c7eda97b3c8482d826a1f3cfc7da7", size = 1593302, upload-time = "2025-11-05T18:38:30.639Z" },
-    { url = "https://files.pythonhosted.org/packages/d5/3b/39e13ce78a8e9a621c5df3aeb5fd181fcc8caba8c48a194cd629771f6828/brotli-1.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:af43b8711a8264bb4e7d6d9a6d004c3a2019c04c01127a868709ec29962b6036", size = 1487913, upload-time = "2025-11-05T18:38:31.618Z" },
-    { url = "https://files.pythonhosted.org/packages/62/28/4d00cb9bd76a6357a66fcd54b4b6d70288385584063f4b07884c1e7286ac/brotli-1.2.0-cp312-cp312-win32.whl", hash = "sha256:e99befa0b48f3cd293dafeacdd0d191804d105d279e0b387a32054c1180f3161", size = 334362, upload-time = "2025-11-05T18:38:32.939Z" },
-    { url = "https://files.pythonhosted.org/packages/1c/4e/bc1dcac9498859d5e353c9b153627a3752868a9d5f05ce8dedd81a2354ab/brotli-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:b35c13ce241abdd44cb8ca70683f20c0c079728a36a996297adb5334adfc1c44", size = 369115, upload-time = "2025-11-05T18:38:33.765Z" },
-    { url = "https://files.pythonhosted.org/packages/6c/d4/4ad5432ac98c73096159d9ce7ffeb82d151c2ac84adcc6168e476bb54674/brotli-1.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9e5825ba2c9998375530504578fd4d5d1059d09621a02065d1b6bfc41a8e05ab", size = 861523, upload-time = "2025-11-05T18:38:34.67Z" },
-    { url = "https://files.pythonhosted.org/packages/91/9f/9cc5bd03ee68a85dc4bc89114f7067c056a3c14b3d95f171918c088bf88d/brotli-1.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0cf8c3b8ba93d496b2fae778039e2f5ecc7cff99df84df337ca31d8f2252896c", size = 444289, upload-time = "2025-11-05T18:38:35.6Z" },
-    { url = "https://files.pythonhosted.org/packages/2e/b6/fe84227c56a865d16a6614e2c4722864b380cb14b13f3e6bef441e73a85a/brotli-1.2.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8565e3cdc1808b1a34714b553b262c5de5fbda202285782173ec137fd13709f", size = 1528076, upload-time = "2025-11-05T18:38:36.639Z" },
-    { url = "https://files.pythonhosted.org/packages/55/de/de4ae0aaca06c790371cf6e7ee93a024f6b4bb0568727da8c3de112e726c/brotli-1.2.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:26e8d3ecb0ee458a9804f47f21b74845cc823fd1bb19f02272be70774f56e2a6", size = 1626880, upload-time = "2025-11-05T18:38:37.623Z" },
-    { url = "https://files.pythonhosted.org/packages/5f/16/a1b22cbea436642e071adcaf8d4b350a2ad02f5e0ad0da879a1be16188a0/brotli-1.2.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67a91c5187e1eec76a61625c77a6c8c785650f5b576ca732bd33ef58b0dff49c", size = 1419737, upload-time = "2025-11-05T18:38:38.729Z" },
-    { url = "https://files.pythonhosted.org/packages/46/63/c968a97cbb3bdbf7f974ef5a6ab467a2879b82afbc5ffb65b8acbb744f95/brotli-1.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ecdb3b6dc36e6d6e14d3a1bdc6c1057c8cbf80db04031d566eb6080ce283a48", size = 1484440, upload-time = "2025-11-05T18:38:39.916Z" },
-    { url = "https://files.pythonhosted.org/packages/06/9d/102c67ea5c9fc171f423e8399e585dabea29b5bc79b05572891e70013cdd/brotli-1.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3e1b35d56856f3ed326b140d3c6d9db91740f22e14b06e840fe4bb1923439a18", size = 1593313, upload-time = "2025-11-05T18:38:41.24Z" },
-    { url = "https://files.pythonhosted.org/packages/9e/4a/9526d14fa6b87bc827ba1755a8440e214ff90de03095cacd78a64abe2b7d/brotli-1.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:54a50a9dad16b32136b2241ddea9e4df159b41247b2ce6aac0b3276a66a8f1e5", size = 1487945, upload-time = "2025-11-05T18:38:42.277Z" },
-    { url = "https://files.pythonhosted.org/packages/5b/e8/3fe1ffed70cbef83c5236166acaed7bb9c766509b157854c80e2f766b38c/brotli-1.2.0-cp313-cp313-win32.whl", hash = "sha256:1b1d6a4efedd53671c793be6dd760fcf2107da3a52331ad9ea429edf0902f27a", size = 334368, upload-time = "2025-11-05T18:38:43.345Z" },
-    { url = "https://files.pythonhosted.org/packages/ff/91/e739587be970a113b37b821eae8097aac5a48e5f0eca438c22e4c7dd8648/brotli-1.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:b63daa43d82f0cdabf98dee215b375b4058cce72871fd07934f179885aad16e8", size = 369116, upload-time = "2025-11-05T18:38:44.609Z" },
-    { url = "https://files.pythonhosted.org/packages/17/e1/298c2ddf786bb7347a1cd71d63a347a79e5712a7c0cba9e3c3458ebd976f/brotli-1.2.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:6c12dad5cd04530323e723787ff762bac749a7b256a5bece32b2243dd5c27b21", size = 863080, upload-time = "2025-11-05T18:38:45.503Z" },
-    { url = "https://files.pythonhosted.org/packages/84/0c/aac98e286ba66868b2b3b50338ffbd85a35c7122e9531a73a37a29763d38/brotli-1.2.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3219bd9e69868e57183316ee19c84e03e8f8b5a1d1f2667e1aa8c2f91cb061ac", size = 445453, upload-time = "2025-11-05T18:38:46.433Z" },
-    { url = "https://files.pythonhosted.org/packages/ec/f1/0ca1f3f99ae300372635ab3fe2f7a79fa335fee3d874fa7f9e68575e0e62/brotli-1.2.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:963a08f3bebd8b75ac57661045402da15991468a621f014be54e50f53a58d19e", size = 1528168, upload-time = "2025-11-05T18:38:47.371Z" },
-    { url = "https://files.pythonhosted.org/packages/d6/a6/2ebfc8f766d46df8d3e65b880a2e220732395e6d7dc312c1e1244b0f074a/brotli-1.2.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9322b9f8656782414b37e6af884146869d46ab85158201d82bab9abbcb971dc7", size = 1627098, upload-time = "2025-11-05T18:38:48.385Z" },
-    { url = "https://files.pythonhosted.org/packages/f3/2f/0976d5b097ff8a22163b10617f76b2557f15f0f39d6a0fe1f02b1a53e92b/brotli-1.2.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cf9cba6f5b78a2071ec6fb1e7bd39acf35071d90a81231d67e92d637776a6a63", size = 1419861, upload-time = "2025-11-05T18:38:49.372Z" },
-    { url = "https://files.pythonhosted.org/packages/9c/97/d76df7176a2ce7616ff94c1fb72d307c9a30d2189fe877f3dd99af00ea5a/brotli-1.2.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7547369c4392b47d30a3467fe8c3330b4f2e0f7730e45e3103d7d636678a808b", size = 1484594, upload-time = "2025-11-05T18:38:50.655Z" },
-    { url = "https://files.pythonhosted.org/packages/d3/93/14cf0b1216f43df5609f5b272050b0abd219e0b54ea80b47cef9867b45e7/brotli-1.2.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:fc1530af5c3c275b8524f2e24841cbe2599d74462455e9bae5109e9ff42e9361", size = 1593455, upload-time = "2025-11-05T18:38:51.624Z" },
-    { url = "https://files.pythonhosted.org/packages/b3/73/3183c9e41ca755713bdf2cc1d0810df742c09484e2e1ddd693bee53877c1/brotli-1.2.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d2d085ded05278d1c7f65560aae97b3160aeb2ea2c0b3e26204856beccb60888", size = 1488164, upload-time = "2025-11-05T18:38:53.079Z" },
-    { url = "https://files.pythonhosted.org/packages/64/6a/0c78d8f3a582859236482fd9fa86a65a60328a00983006bcf6d83b7b2253/brotli-1.2.0-cp314-cp314-win32.whl", hash = "sha256:832c115a020e463c2f67664560449a7bea26b0c1fdd690352addad6d0a08714d", size = 339280, upload-time = "2025-11-05T18:38:54.02Z" },
-    { url = "https://files.pythonhosted.org/packages/f5/10/56978295c14794b2c12007b07f3e41ba26acda9257457d7085b0bb3bb90c/brotli-1.2.0-cp314-cp314-win_amd64.whl", hash = "sha256:e7c0af964e0b4e3412a0ebf341ea26ec767fa0b4cf81abb5e897c9338b5ad6a3", size = 375639, upload-time = "2025-11-05T18:38:55.67Z" },
-]
-
-[[package]]
-name = "brotlicffi"
-version = "1.2.0.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "cffi" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/84/85/57c314a6b35336efbbdc13e5fc9ae13f6b60a0647cfa7c1221178ac6d8ae/brotlicffi-1.2.0.0.tar.gz", hash = "sha256:34345d8d1f9d534fcac2249e57a4c3c8801a33c9942ff9f8574f67a175e17adb", size = 476682, upload-time = "2025-11-21T18:17:57.334Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/7c/87/ba6298c3d7f8d66ce80d7a487f2a487ebae74a79c6049c7c2990178ce529/brotlicffi-1.2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b13fb476a96f02e477a506423cb5e7bc21e0e3ac4c060c20ba31c44056e38c68", size = 433038, upload-time = "2026-03-05T17:57:37.96Z" },
-    { url = "https://files.pythonhosted.org/packages/00/49/16c7a77d1cae0519953ef0389a11a9c2e2e62e87d04f8e7afbae40124255/brotlicffi-1.2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17db36fb581f7b951635cd6849553a95c6f2f53c1a707817d06eae5aeff5f6af", size = 1541124, upload-time = "2026-03-05T17:57:39.488Z" },
-    { url = "https://files.pythonhosted.org/packages/e8/17/fab2c36ea820e2288f8c1bf562de1b6cd9f30e28d66f1ce2929a4baff6de/brotlicffi-1.2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:40190192790489a7b054312163d0ce82b07d1b6e706251036898ce1684ef12e9", size = 1541983, upload-time = "2026-03-05T17:57:41.061Z" },
-    { url = "https://files.pythonhosted.org/packages/78/c9/849a669b3b3bb8ac96005cdef04df4db658c33443a7fc704a6d4a2f07a56/brotlicffi-1.2.0.0-cp314-cp314t-win32.whl", hash = "sha256:a8079e8ecc32ecef728036a1d9b7105991ce6a5385cf51ee8c02297c90fb08c2", size = 349046, upload-time = "2026-03-05T17:57:42.76Z" },
-    { url = "https://files.pythonhosted.org/packages/a4/25/09c0fd21cfc451fa38ad538f4d18d8be566746531f7f27143f63f8c45a9f/brotlicffi-1.2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:ca90c4266704ca0a94de8f101b4ec029624273380574e4cf19301acfa46c61a0", size = 385653, upload-time = "2026-03-05T17:57:44.224Z" },
-    { url = "https://files.pythonhosted.org/packages/e4/df/a72b284d8c7bef0ed5756b41c2eb7d0219a1dd6ac6762f1c7bdbc31ef3af/brotlicffi-1.2.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:9458d08a7ccde8e3c0afedbf2c70a8263227a68dea5ab13590593f4c0a4fd5f4", size = 432340, upload-time = "2025-11-21T18:17:42.277Z" },
-    { url = "https://files.pythonhosted.org/packages/74/2b/cc55a2d1d6fb4f5d458fba44a3d3f91fb4320aa14145799fd3a996af0686/brotlicffi-1.2.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:84e3d0020cf1bd8b8131f4a07819edee9f283721566fe044a20ec792ca8fd8b7", size = 1534002, upload-time = "2025-11-21T18:17:43.746Z" },
-    { url = "https://files.pythonhosted.org/packages/e4/9c/d51486bf366fc7d6735f0e46b5b96ca58dc005b250263525a1eea3cd5d21/brotlicffi-1.2.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:33cfb408d0cff64cd50bef268c0fed397c46fbb53944aa37264148614a62e990", size = 1536547, upload-time = "2025-11-21T18:17:45.729Z" },
-    { url = "https://files.pythonhosted.org/packages/1b/37/293a9a0a7caf17e6e657668bebb92dfe730305999fe8c0e2703b8888789c/brotlicffi-1.2.0.0-cp38-abi3-win32.whl", hash = "sha256:23e5c912fdc6fd37143203820230374d24babd078fc054e18070a647118158f6", size = 343085, upload-time = "2025-11-21T18:17:48.887Z" },
-    { url = "https://files.pythonhosted.org/packages/07/6b/6e92009df3b8b7272f85a0992b306b61c34b7ea1c4776643746e61c380ac/brotlicffi-1.2.0.0-cp38-abi3-win_amd64.whl", hash = "sha256:f139a7cdfe4ae7859513067b736eb44d19fae1186f9e99370092f6915216451b", size = 378586, upload-time = "2025-11-21T18:17:50.531Z" },
-]
-
 [[package]]
 name = "certifi"
-version = "2026.1.4"
+version = "2026.4.22"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/25/ee/6caf7a40c36a1220410afe15a1cc64993a1f864871f698c0f93acb72842a/certifi-2026.4.22.tar.gz", hash = "sha256:8d455352a37b71bf76a79caa83a3d6c25afee4a385d632127b6afb3963f1c580", size = 137077, upload-time = "2026-04-22T11:26:11.191Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" },
+    { url = "https://files.pythonhosted.org/packages/22/30/7cd8fdcdfbc5b869528b079bfb76dcdf6056b1a2097a662e5e8c04f42965/certifi-2026.4.22-py3-none-any.whl", hash = "sha256:3cb2210c8f88ba2318d29b0388d1023c8492ff72ecdde4ebdaddbb13a31b1c4a", size = 135707, upload-time = "2026-04-22T11:26:09.372Z" },
 ]
 
 [[package]]
@@ -477,71 +538,87 @@ wheels = [
 
 [[package]]
 name = "charset-normalizer"
-version = "3.4.4"
+version = "3.4.7"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/e7/a1/67fe25fac3c7642725500a3f6cfe5821ad557c3abb11c9d20d12c7008d3e/charset_normalizer-3.4.7.tar.gz", hash = "sha256:ae89db9e5f98a11a4bf50407d4363e7b09b31e55bc117b4f7d80aab97ba009e5", size = 144271, upload-time = "2026-04-02T09:28:39.342Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" },
-    { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" },
-    { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" },
-    { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" },
-    { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" },
-    { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" },
-    { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" },
-    { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" },
-    { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" },
-    { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" },
-    { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" },
-    { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" },
-    { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" },
-    { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" },
-    { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" },
-    { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" },
-    { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" },
-    { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" },
-    { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" },
-    { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" },
-    { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" },
-    { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" },
-    { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" },
-    { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" },
-    { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" },
-    { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" },
-    { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" },
-    { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" },
-    { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" },
-    { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" },
-    { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" },
-    { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" },
-    { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" },
-    { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" },
-    { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" },
-    { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" },
-    { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" },
-    { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" },
-    { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" },
-    { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" },
-    { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" },
-    { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" },
-    { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" },
-    { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" },
-    { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" },
-    { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" },
-    { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" },
+    { url = "https://files.pythonhosted.org/packages/0c/eb/4fc8d0a7110eb5fc9cc161723a34a8a6c200ce3b4fbf681bc86feee22308/charset_normalizer-3.4.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:eca9705049ad3c7345d574e3510665cb2cf844c2f2dcfe675332677f081cbd46", size = 311328, upload-time = "2026-04-02T09:26:24.331Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/e3/0fadc706008ac9d7b9b5be6dc767c05f9d3e5df51744ce4cc9605de7b9f4/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6178f72c5508bfc5fd446a5905e698c6212932f25bcdd4b47a757a50605a90e2", size = 208061, upload-time = "2026-04-02T09:26:25.568Z" },
+    { url = "https://files.pythonhosted.org/packages/42/f0/3dd1045c47f4a4604df85ec18ad093912ae1344ac706993aff91d38773a2/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1421b502d83040e6d7fb2fb18dff63957f720da3d77b2fbd3187ceb63755d7b", size = 229031, upload-time = "2026-04-02T09:26:26.865Z" },
+    { url = "https://files.pythonhosted.org/packages/dc/67/675a46eb016118a2fbde5a277a5d15f4f69d5f3f5f338e5ee2f8948fcf43/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:edac0f1ab77644605be2cbba52e6b7f630731fc42b34cb0f634be1a6eface56a", size = 225239, upload-time = "2026-04-02T09:26:28.044Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/f8/d0118a2f5f23b02cd166fa385c60f9b0d4f9194f574e2b31cef350ad7223/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5649fd1c7bade02f320a462fdefd0b4bd3ce036065836d4f42e0de958038e116", size = 216589, upload-time = "2026-04-02T09:26:29.239Z" },
+    { url = "https://files.pythonhosted.org/packages/b1/f1/6d2b0b261b6c4ceef0fcb0d17a01cc5bc53586c2d4796fa04b5c540bc13d/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:203104ed3e428044fd943bc4bf45fa73c0730391f9621e37fe39ecf477b128cb", size = 202733, upload-time = "2026-04-02T09:26:30.5Z" },
+    { url = "https://files.pythonhosted.org/packages/6f/c0/7b1f943f7e87cc3db9626ba17807d042c38645f0a1d4415c7a14afb5591f/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:298930cec56029e05497a76988377cbd7457ba864beeea92ad7e844fe74cd1f1", size = 212652, upload-time = "2026-04-02T09:26:31.709Z" },
+    { url = "https://files.pythonhosted.org/packages/38/dd/5a9ab159fe45c6e72079398f277b7d2b523e7f716acc489726115a910097/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:708838739abf24b2ceb208d0e22403dd018faeef86ddac04319a62ae884c4f15", size = 211229, upload-time = "2026-04-02T09:26:33.282Z" },
+    { url = "https://files.pythonhosted.org/packages/d5/ff/531a1cad5ca855d1c1a8b69cb71abfd6d85c0291580146fda7c82857caa1/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0f7eb884681e3938906ed0434f20c63046eacd0111c4ba96f27b76084cd679f5", size = 203552, upload-time = "2026-04-02T09:26:34.845Z" },
+    { url = "https://files.pythonhosted.org/packages/c1/4c/a5fb52d528a8ca41f7598cb619409ece30a169fbdf9cdce592e53b46c3a6/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4dc1e73c36828f982bfe79fadf5919923f8a6f4df2860804db9a98c48824ce8d", size = 230806, upload-time = "2026-04-02T09:26:36.152Z" },
+    { url = "https://files.pythonhosted.org/packages/59/7a/071feed8124111a32b316b33ae4de83d36923039ef8cf48120266844285b/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:aed52fea0513bac0ccde438c188c8a471c4e0f457c2dd20cdbf6ea7a450046c7", size = 212316, upload-time = "2026-04-02T09:26:37.672Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/35/f7dba3994312d7ba508e041eaac39a36b120f32d4c8662b8814dab876431/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fea24543955a6a729c45a73fe90e08c743f0b3334bbf3201e6c4bc1b0c7fa464", size = 227274, upload-time = "2026-04-02T09:26:38.93Z" },
+    { url = "https://files.pythonhosted.org/packages/8a/2d/a572df5c9204ab7688ec1edc895a73ebded3b023bb07364710b05dd1c9be/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb6d88045545b26da47aa879dd4a89a71d1dce0f0e549b1abcb31dfe4a8eac49", size = 218468, upload-time = "2026-04-02T09:26:40.17Z" },
+    { url = "https://files.pythonhosted.org/packages/86/eb/890922a8b03a568ca2f336c36585a4713c55d4d67bf0f0c78924be6315ca/charset_normalizer-3.4.7-cp312-cp312-win32.whl", hash = 
"sha256:2257141f39fe65a3fdf38aeccae4b953e5f3b3324f4ff0daf9f15b8518666a2c", size = 148460, upload-time = "2026-04-02T09:26:41.416Z" }, + { url = "https://files.pythonhosted.org/packages/35/d9/0e7dffa06c5ab081f75b1b786f0aefc88365825dfcd0ac544bdb7b2b6853/charset_normalizer-3.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:5ed6ab538499c8644b8a3e18debabcd7ce684f3fa91cf867521a7a0279cab2d6", size = 159330, upload-time = "2026-04-02T09:26:42.554Z" }, + { url = "https://files.pythonhosted.org/packages/9e/5d/481bcc2a7c88ea6b0878c299547843b2521ccbc40980cb406267088bc701/charset_normalizer-3.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:56be790f86bfb2c98fb742ce566dfb4816e5a83384616ab59c49e0604d49c51d", size = 147828, upload-time = "2026-04-02T09:26:44.075Z" }, + { url = "https://files.pythonhosted.org/packages/c1/3b/66777e39d3ae1ddc77ee606be4ec6d8cbd4c801f65e5a1b6f2b11b8346dd/charset_normalizer-3.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f496c9c3cc02230093d8330875c4c3cdfc3b73612a5fd921c65d39cbcef08063", size = 309627, upload-time = "2026-04-02T09:26:45.198Z" }, + { url = "https://files.pythonhosted.org/packages/2e/4e/b7f84e617b4854ade48a1b7915c8ccfadeba444d2a18c291f696e37f0d3b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ea948db76d31190bf08bd371623927ee1339d5f2a0b4b1b4a4439a65298703c", size = 207008, upload-time = "2026-04-02T09:26:46.824Z" }, + { url = "https://files.pythonhosted.org/packages/c4/bb/ec73c0257c9e11b268f018f068f5d00aa0ef8c8b09f7753ebd5f2880e248/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a277ab8928b9f299723bc1a2dabb1265911b1a76341f90a510368ca44ad9ab66", size = 228303, upload-time = "2026-04-02T09:26:48.397Z" }, + { url = "https://files.pythonhosted.org/packages/85/fb/32d1f5033484494619f701e719429c69b766bfc4dbc61aa9e9c8c166528b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3bec022aec2c514d9cf199522a802bd007cd588ab17ab2525f20f9c34d067c18", size = 224282, upload-time = "2026-04-02T09:26:49.684Z" }, + { url = "https://files.pythonhosted.org/packages/fa/07/330e3a0dda4c404d6da83b327270906e9654a24f6c546dc886a0eb0ffb23/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e044c39e41b92c845bc815e5ae4230804e8e7bc29e399b0437d64222d92809dd", size = 215595, upload-time = "2026-04-02T09:26:50.915Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7c/fc890655786e423f02556e0216d4b8c6bcb6bdfa890160dc66bf52dee468/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:f495a1652cf3fbab2eb0639776dad966c2fb874d79d87ca07f9d5f059b8bd215", size = 201986, upload-time = "2026-04-02T09:26:52.197Z" }, + { url = "https://files.pythonhosted.org/packages/d8/97/bfb18b3db2aed3b90cf54dc292ad79fdd5ad65c4eae454099475cbeadd0d/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e712b419df8ba5e42b226c510472b37bd57b38e897d3eca5e8cfd410a29fa859", size = 211711, upload-time = "2026-04-02T09:26:53.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a5/a581c13798546a7fd557c82614a5c65a13df2157e9ad6373166d2a3e645d/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7804338df6fcc08105c7745f1502ba68d900f45fd770d5bdd5288ddccb8a42d8", size = 210036, upload-time = "2026-04-02T09:26:54.975Z" }, + { 
url = "https://files.pythonhosted.org/packages/8c/bf/b3ab5bcb478e4193d517644b0fb2bf5497fbceeaa7a1bc0f4d5b50953861/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:481551899c856c704d58119b5025793fa6730adda3571971af568f66d2424bb5", size = 202998, upload-time = "2026-04-02T09:26:56.303Z" }, + { url = "https://files.pythonhosted.org/packages/e7/4e/23efd79b65d314fa320ec6017b4b5834d5c12a58ba4610aa353af2e2f577/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f59099f9b66f0d7145115e6f80dd8b1d847176df89b234a5a6b3f00437aa0832", size = 230056, upload-time = "2026-04-02T09:26:57.554Z" }, + { url = "https://files.pythonhosted.org/packages/b9/9f/1e1941bc3f0e01df116e68dc37a55c4d249df5e6fa77f008841aef68264f/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:f59ad4c0e8f6bba240a9bb85504faa1ab438237199d4cce5f622761507b8f6a6", size = 211537, upload-time = "2026-04-02T09:26:58.843Z" }, + { url = "https://files.pythonhosted.org/packages/80/0f/088cbb3020d44428964a6c97fe1edfb1b9550396bf6d278330281e8b709c/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3dedcc22d73ec993f42055eff4fcfed9318d1eeb9a6606c55892a26964964e48", size = 226176, upload-time = "2026-04-02T09:27:00.437Z" }, + { url = "https://files.pythonhosted.org/packages/6a/9f/130394f9bbe06f4f63e22641d32fc9b202b7e251c9aef4db044324dac493/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:64f02c6841d7d83f832cd97ccf8eb8a906d06eb95d5276069175c696b024b60a", size = 217723, upload-time = "2026-04-02T09:27:02.021Z" }, + { url = "https://files.pythonhosted.org/packages/73/55/c469897448a06e49f8fa03f6caae97074fde823f432a98f979cc42b90e69/charset_normalizer-3.4.7-cp313-cp313-win32.whl", hash = "sha256:4042d5c8f957e15221d423ba781e85d553722fc4113f523f2feb7b188cc34c5e", size = 148085, upload-time = "2026-04-02T09:27:03.192Z" }, + { url = "https://files.pythonhosted.org/packages/5d/78/1b74c5bbb3f99b77a1715c91b3e0b5bdb6fe302d95ace4f5b1bec37b0167/charset_normalizer-3.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:3946fa46a0cf3e4c8cb1cc52f56bb536310d34f25f01ca9b6c16afa767dab110", size = 158819, upload-time = "2026-04-02T09:27:04.454Z" }, + { url = "https://files.pythonhosted.org/packages/68/86/46bd42279d323deb8687c4a5a811fd548cb7d1de10cf6535d099877a9a9f/charset_normalizer-3.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:80d04837f55fc81da168b98de4f4b797ef007fc8a79ab71c6ec9bc4dd662b15b", size = 147915, upload-time = "2026-04-02T09:27:05.971Z" }, + { url = "https://files.pythonhosted.org/packages/97/c8/c67cb8c70e19ef1960b97b22ed2a1567711de46c4ddf19799923adc836c2/charset_normalizer-3.4.7-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c36c333c39be2dbca264d7803333c896ab8fa7d4d6f0ab7edb7dfd7aea6e98c0", size = 309234, upload-time = "2026-04-02T09:27:07.194Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/c091fdee33f20de70d6c8b522743b6f831a2f1cd3ff86de4c6a827c48a76/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c2aed2e5e41f24ea8ef1590b8e848a79b56f3a5564a65ceec43c9d692dc7d8a", size = 208042, upload-time = "2026-04-02T09:27:08.749Z" }, + { url = "https://files.pythonhosted.org/packages/87/1c/ab2ce611b984d2fd5d86a5a8a19c1ae26acac6bad967da4967562c75114d/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54523e136b8948060c0fa0bc7b1b50c32c186f2fceee897a495406bb6e311d2b", 
size = 228706, upload-time = "2026-04-02T09:27:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a8/29/2b1d2cb00bf085f59d29eb773ce58ec2d325430f8c216804a0a5cd83cbca/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:715479b9a2802ecac752a3b0efa2b0b60285cf962ee38414211abdfccc233b41", size = 224727, upload-time = "2026-04-02T09:27:11.175Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/032c2d5a07fe4d4855fea851209cca2b6f03ebeb6d4e3afdb3358386a684/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd6c2a1c7573c64738d716488d2cdd3c00e340e4835707d8fdb8dc1a66ef164e", size = 215882, upload-time = "2026-04-02T09:27:12.446Z" }, + { url = "https://files.pythonhosted.org/packages/2c/c2/356065d5a8b78ed04499cae5f339f091946a6a74f91e03476c33f0ab7100/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:c45e9440fb78f8ddabcf714b68f936737a121355bf59f3907f4e17721b9d1aae", size = 200860, upload-time = "2026-04-02T09:27:13.721Z" }, + { url = "https://files.pythonhosted.org/packages/0c/cd/a32a84217ced5039f53b29f460962abb2d4420def55afabe45b1c3c7483d/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3534e7dcbdcf757da6b85a0bbf5b6868786d5982dd959b065e65481644817a18", size = 211564, upload-time = "2026-04-02T09:27:15.272Z" }, + { url = "https://files.pythonhosted.org/packages/44/86/58e6f13ce26cc3b8f4a36b94a0f22ae2f00a72534520f4ae6857c4b81f89/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e8ac484bf18ce6975760921bb6148041faa8fef0547200386ea0b52b5d27bf7b", size = 211276, upload-time = "2026-04-02T09:27:16.834Z" }, + { url = "https://files.pythonhosted.org/packages/8f/fe/d17c32dc72e17e155e06883efa84514ca375f8a528ba2546bee73fc4df81/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a5fe03b42827c13cdccd08e6c0247b6a6d4b5e3cdc53fd1749f5896adcdc2356", size = 201238, upload-time = "2026-04-02T09:27:18.229Z" }, + { url = "https://files.pythonhosted.org/packages/6a/29/f33daa50b06525a237451cdb6c69da366c381a3dadcd833fa5676bc468b3/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:2d6eb928e13016cea4f1f21d1e10c1cebd5a421bc57ddf5b1142ae3f86824fab", size = 230189, upload-time = "2026-04-02T09:27:19.445Z" }, + { url = "https://files.pythonhosted.org/packages/b6/6e/52c84015394a6a0bdcd435210a7e944c5f94ea1055f5cc5d56c5fe368e7b/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e74327fb75de8986940def6e8dee4f127cc9752bee7355bb323cc5b2659b6d46", size = 211352, upload-time = "2026-04-02T09:27:20.79Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d7/4353be581b373033fb9198bf1da3cf8f09c1082561e8e922aa7b39bf9fe8/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:d6038d37043bced98a66e68d3aa2b6a35505dc01328cd65217cefe82f25def44", size = 227024, upload-time = "2026-04-02T09:27:22.063Z" }, + { url = "https://files.pythonhosted.org/packages/30/45/99d18aa925bd1740098ccd3060e238e21115fffbfdcb8f3ece837d0ace6c/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7579e913a5339fb8fa133f6bbcfd8e6749696206cf05acdbdca71a1b436d8e72", size = 217869, upload-time = "2026-04-02T09:27:23.486Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/05/5ee478aa53f4bb7996482153d4bfe1b89e0f087f0ab6b294fcf92d595873/charset_normalizer-3.4.7-cp314-cp314-win32.whl", hash = "sha256:5b77459df20e08151cd6f8b9ef8ef1f961ef73d85c21a555c7eed5b79410ec10", size = 148541, upload-time = "2026-04-02T09:27:25.146Z" }, + { url = "https://files.pythonhosted.org/packages/48/77/72dcb0921b2ce86420b2d79d454c7022bf5be40202a2a07906b9f2a35c97/charset_normalizer-3.4.7-cp314-cp314-win_amd64.whl", hash = "sha256:92a0a01ead5e668468e952e4238cccd7c537364eb7d851ab144ab6627dbbe12f", size = 159634, upload-time = "2026-04-02T09:27:26.642Z" }, + { url = "https://files.pythonhosted.org/packages/c6/a3/c2369911cd72f02386e4e340770f6e158c7980267da16af8f668217abaa0/charset_normalizer-3.4.7-cp314-cp314-win_arm64.whl", hash = "sha256:67f6279d125ca0046a7fd386d01b311c6363844deac3e5b069b514ba3e63c246", size = 148384, upload-time = "2026-04-02T09:27:28.271Z" }, + { url = "https://files.pythonhosted.org/packages/94/09/7e8a7f73d24dba1f0035fbbf014d2c36828fc1bf9c88f84093e57d315935/charset_normalizer-3.4.7-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:effc3f449787117233702311a1b7d8f59cba9ced946ba727bdc329ec69028e24", size = 330133, upload-time = "2026-04-02T09:27:29.474Z" }, + { url = "https://files.pythonhosted.org/packages/8d/da/96975ddb11f8e977f706f45cddd8540fd8242f71ecdb5d18a80723dcf62c/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fbccdc05410c9ee21bbf16a35f4c1d16123dcdeb8a1d38f33654fa21d0234f79", size = 216257, upload-time = "2026-04-02T09:27:30.793Z" }, + { url = "https://files.pythonhosted.org/packages/e5/e8/1d63bf8ef2d388e95c64b2098f45f84758f6d102a087552da1485912637b/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:733784b6d6def852c814bce5f318d25da2ee65dd4839a0718641c696e09a2960", size = 234851, upload-time = "2026-04-02T09:27:32.44Z" }, + { url = "https://files.pythonhosted.org/packages/9b/40/e5ff04233e70da2681fa43969ad6f66ca5611d7e669be0246c4c7aaf6dc8/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a89c23ef8d2c6b27fd200a42aa4ac72786e7c60d40efdc76e6011260b6e949c4", size = 233393, upload-time = "2026-04-02T09:27:34.03Z" }, + { url = "https://files.pythonhosted.org/packages/be/c1/06c6c49d5a5450f76899992f1ee40b41d076aee9279b49cf9974d2f313d5/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c114670c45346afedc0d947faf3c7f701051d2518b943679c8ff88befe14f8e", size = 223251, upload-time = "2026-04-02T09:27:35.369Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/f2ff16fb050946169e3e1f82134d107e5d4ae72647ec8a1b1446c148480f/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:a180c5e59792af262bf263b21a3c49353f25945d8d9f70628e73de370d55e1e1", size = 206609, upload-time = "2026-04-02T09:27:36.661Z" }, + { url = "https://files.pythonhosted.org/packages/69/d5/a527c0cd8d64d2eab7459784fb4169a0ac76e5a6fc5237337982fd61347e/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3c9a494bc5ec77d43cea229c4f6db1e4d8fe7e1bbffa8b6f0f0032430ff8ab44", size = 220014, upload-time = "2026-04-02T09:27:38.019Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/80/8a7b8104a3e203074dc9aa2c613d4b726c0e136bad1cc734594b02867972/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8d828b6667a32a728a1ad1d93957cdf37489c57b97ae6c4de2860fa749b8fc1e", size = 218979, upload-time = "2026-04-02T09:27:39.37Z" }, + { url = "https://files.pythonhosted.org/packages/02/9a/b759b503d507f375b2b5c153e4d2ee0a75aa215b7f2489cf314f4541f2c0/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:cf1493cd8607bec4d8a7b9b004e699fcf8f9103a9284cc94962cb73d20f9d4a3", size = 209238, upload-time = "2026-04-02T09:27:40.722Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/0f3f5d47b86bdb79256e7290b26ac847a2832d9a4033f7eb2cd4bcf4bb5b/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0c96c3b819b5c3e9e165495db84d41914d6894d55181d2d108cc1a69bfc9cce0", size = 236110, upload-time = "2026-04-02T09:27:42.33Z" }, + { url = "https://files.pythonhosted.org/packages/96/23/bce28734eb3ed2c91dcf93abeb8a5cf393a7b2749725030bb630e554fdd8/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:752a45dc4a6934060b3b0dab47e04edc3326575f82be64bc4fc293914566503e", size = 219824, upload-time = "2026-04-02T09:27:43.924Z" }, + { url = "https://files.pythonhosted.org/packages/2c/6f/6e897c6984cc4d41af319b077f2f600fc8214eb2fe2d6bcb79141b882400/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:8778f0c7a52e56f75d12dae53ae320fae900a8b9b4164b981b9c5ce059cd1fcb", size = 233103, upload-time = "2026-04-02T09:27:45.348Z" }, + { url = "https://files.pythonhosted.org/packages/76/22/ef7bd0fe480a0ae9b656189ec00744b60933f68b4f42a7bb06589f6f576a/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ce3412fbe1e31eb81ea42f4169ed94861c56e643189e1e75f0041f3fe7020abe", size = 225194, upload-time = "2026-04-02T09:27:46.706Z" }, + { url = "https://files.pythonhosted.org/packages/c5/a7/0e0ab3e0b5bc1219bd80a6a0d4d72ca74d9250cb2382b7c699c147e06017/charset_normalizer-3.4.7-cp314-cp314t-win32.whl", hash = "sha256:c03a41a8784091e67a39648f70c5f97b5b6a37f216896d44d2cdcb82615339a0", size = 159827, upload-time = "2026-04-02T09:27:48.053Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1d/29d32e0fb40864b1f878c7f5a0b343ae676c6e2b271a2d55cc3a152391da/charset_normalizer-3.4.7-cp314-cp314t-win_amd64.whl", hash = "sha256:03853ed82eeebbce3c2abfdbc98c96dc205f32a79627688ac9a27370ea61a49c", size = 174168, upload-time = "2026-04-02T09:27:49.795Z" }, + { url = "https://files.pythonhosted.org/packages/de/32/d92444ad05c7a6e41fb2036749777c163baf7a0301a040cb672d6b2b1ae9/charset_normalizer-3.4.7-cp314-cp314t-win_arm64.whl", hash = "sha256:c35abb8bfff0185efac5878da64c45dafd2b37fb0383add1be155a763c1f083d", size = 153018, upload-time = "2026-04-02T09:27:51.116Z" }, + { url = "https://files.pythonhosted.org/packages/db/8f/61959034484a4a7c527811f4721e75d02d653a35afb0b6054474d8185d4c/charset_normalizer-3.4.7-py3-none-any.whl", hash = "sha256:3dce51d0f5e7951f8bb4900c257dad282f49190fdbebecd4ba99bcc41fef404d", size = 61958, upload-time = "2026-04-02T09:28:37.794Z" }, ] [[package]] name = "click" -version = "8.3.1" +version = "8.3.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = 
"sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/63/f9e1ea081ce35720d8b92acde70daaedace594dc93b693c869e0d5910718/click-8.3.3.tar.gz", hash = "sha256:398329ad4837b2ff7cbe1dd166a4c0f8900c3ca3a218de04466f38f6497f18a2", size = 328061, upload-time = "2026-04-22T15:11:27.506Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, + { url = "https://files.pythonhosted.org/packages/ae/44/c1221527f6a71a01ec6fbad7fa78f1d50dfa02217385cf0fa3eec7087d59/click-8.3.3-py3-none-any.whl", hash = "sha256:a2bf429bb3033c89fa4936ffb35d5cb471e3719e1f3c8a7c3fff0b8314305613", size = 110502, upload-time = "2026-04-22T15:11:25.044Z" }, ] [[package]] @@ -585,84 +662,81 @@ wheels = [ [[package]] name = "croniter" -version = "6.0.0" +version = "6.2.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "python-dateutil" }, - { name = "pytz" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/2f/44d1ae153a0e27be56be43465e5cb39b9650c781e001e7864389deb25090/croniter-6.0.0.tar.gz", hash = "sha256:37c504b313956114a983ece2c2b07790b1f1094fe9d81cc94739214748255577", size = 64481, upload-time = "2024-12-17T17:17:47.32Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/de/5832661ed55107b8a09af3f0a2e71e0957226a59eb1dcf0a445cce6daf20/croniter-6.2.2.tar.gz", hash = "sha256:ba60832a5ec8e12e51b8691c3309a113d1cf6526bdf1a48150ce8ec7a532d0ab", size = 113762, upload-time = "2026-03-15T08:43:48.112Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/4b/290b4c3efd6417a8b0c284896de19b1d5855e6dbdb97d2a35e68fa42de85/croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368", size = 25468, upload-time = "2024-12-17T17:17:45.359Z" }, + { url = "https://files.pythonhosted.org/packages/d0/39/783980e78cb92c2d7bdb1fc7dbc86e94ccc6d58224d76a7f1f51b6c51e30/croniter-6.2.2-py3-none-any.whl", hash = "sha256:a5d17b1060974d36251ea4faf388233eca8acf0d09cbd92d35f4c4ac8f279960", size = 45422, upload-time = "2026-03-15T08:43:46.626Z" }, ] [[package]] name = "cryptography" -version = "46.0.5" +version = "46.0.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } +sdist = { url = "https://files.pythonhosted.org/packages/47/93/ac8f3d5ff04d54bc814e961a43ae5b0b146154c89c61b47bb07557679b18/cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5", size = 750652, upload-time = "2026-04-08T01:57:54.692Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" 
}, - { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, - { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, - { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, - { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" }, - { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, - { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, - { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, - { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, - { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, - { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, - { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" 
}, - { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, - { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, - { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" }, - { url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, - { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, - { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, - { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, - { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, - { url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, - { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" }, - { 
url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, - { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, - { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, - { url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" }, - { url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" }, - { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, - { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, - { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, - { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, - { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, - { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, - { url = 
"https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, - { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, - { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, - { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, - { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, - { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, - { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" }, - { url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" }, + { url = "https://files.pythonhosted.org/packages/0b/5d/4a8f770695d73be252331e60e526291e3df0c9b27556a90a6b47bccca4c2/cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4", size = 7179869, upload-time = "2026-04-08T01:56:17.157Z" }, + { url = "https://files.pythonhosted.org/packages/5f/45/6d80dc379b0bbc1f9d1e429f42e4cb9e1d319c7a8201beffd967c516ea01/cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325", size = 4275492, upload-time = "2026-04-08T01:56:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/4a/9a/1765afe9f572e239c3469f2cb429f3ba7b31878c893b246b4b2994ffe2fe/cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308", size = 4426670, upload-time = "2026-04-08T01:56:21.415Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/3e/af9246aaf23cd4ee060699adab1e47ced3f5f7e7a8ffdd339f817b446462/cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77", size = 4280275, upload-time = "2026-04-08T01:56:23.539Z" }, + { url = "https://files.pythonhosted.org/packages/0f/54/6bbbfc5efe86f9d71041827b793c24811a017c6ac0fd12883e4caa86b8ed/cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1", size = 4928402, upload-time = "2026-04-08T01:56:25.624Z" }, + { url = "https://files.pythonhosted.org/packages/2d/cf/054b9d8220f81509939599c8bdbc0c408dbd2bdd41688616a20731371fe0/cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef", size = 4459985, upload-time = "2026-04-08T01:56:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/f9/46/4e4e9c6040fb01c7467d47217d2f882daddeb8828f7df800cb806d8a2288/cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de", size = 3990652, upload-time = "2026-04-08T01:56:29.095Z" }, + { url = "https://files.pythonhosted.org/packages/36/5f/313586c3be5a2fbe87e4c9a254207b860155a8e1f3cca99f9910008e7d08/cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83", size = 4279805, upload-time = "2026-04-08T01:56:30.928Z" }, + { url = "https://files.pythonhosted.org/packages/69/33/60dfc4595f334a2082749673386a4d05e4f0cf4df8248e63b2c3437585f2/cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb", size = 4892883, upload-time = "2026-04-08T01:56:32.614Z" }, + { url = "https://files.pythonhosted.org/packages/c7/0b/333ddab4270c4f5b972f980adef4faa66951a4aaf646ca067af597f15563/cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b", size = 4459756, upload-time = "2026-04-08T01:56:34.306Z" }, + { url = "https://files.pythonhosted.org/packages/d2/14/633913398b43b75f1234834170947957c6b623d1701ffc7a9600da907e89/cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85", size = 4410244, upload-time = "2026-04-08T01:56:35.977Z" }, + { url = "https://files.pythonhosted.org/packages/10/f2/19ceb3b3dc14009373432af0c13f46aa08e3ce334ec6eff13492e1812ccd/cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e", size = 4674868, upload-time = "2026-04-08T01:56:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/1a/bb/a5c213c19ee94b15dfccc48f363738633a493812687f5567addbcbba9f6f/cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457", size = 3026504, upload-time = "2026-04-08T01:56:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/2b/02/7788f9fefa1d060ca68717c3901ae7fffa21ee087a90b7f23c7a603c32ae/cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b", size = 3488363, upload-time = "2026-04-08T01:56:41.893Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/56/15619b210e689c5403bb0540e4cb7dbf11a6bf42e483b7644e471a2812b3/cryptography-46.0.7-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:d151173275e1728cf7839aaa80c34fe550c04ddb27b34f48c232193df8db5842", size = 7119671, upload-time = "2026-04-08T01:56:44Z" }, + { url = "https://files.pythonhosted.org/packages/74/66/e3ce040721b0b5599e175ba91ab08884c75928fbeb74597dd10ef13505d2/cryptography-46.0.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:db0f493b9181c7820c8134437eb8b0b4792085d37dbb24da050476ccb664e59c", size = 4268551, upload-time = "2026-04-08T01:56:46.071Z" }, + { url = "https://files.pythonhosted.org/packages/03/11/5e395f961d6868269835dee1bafec6a1ac176505a167f68b7d8818431068/cryptography-46.0.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ebd6daf519b9f189f85c479427bbd6e9c9037862cf8fe89ee35503bd209ed902", size = 4408887, upload-time = "2026-04-08T01:56:47.718Z" }, + { url = "https://files.pythonhosted.org/packages/40/53/8ed1cf4c3b9c8e611e7122fb56f1c32d09e1fff0f1d77e78d9ff7c82653e/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b7b412817be92117ec5ed95f880defe9cf18a832e8cafacf0a22337dc1981b4d", size = 4271354, upload-time = "2026-04-08T01:56:49.312Z" }, + { url = "https://files.pythonhosted.org/packages/50/46/cf71e26025c2e767c5609162c866a78e8a2915bbcfa408b7ca495c6140c4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:fbfd0e5f273877695cb93baf14b185f4878128b250cc9f8e617ea0c025dfb022", size = 4905845, upload-time = "2026-04-08T01:56:50.916Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ea/01276740375bac6249d0a971ebdf6b4dc9ead0ee0a34ef3b5a88c1a9b0d4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:ffca7aa1d00cf7d6469b988c581598f2259e46215e0140af408966a24cf086ce", size = 4444641, upload-time = "2026-04-08T01:56:52.882Z" }, + { url = "https://files.pythonhosted.org/packages/3d/4c/7d258f169ae71230f25d9f3d06caabcff8c3baf0978e2b7d65e0acac3827/cryptography-46.0.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:60627cf07e0d9274338521205899337c5d18249db56865f943cbe753aa96f40f", size = 3967749, upload-time = "2026-04-08T01:56:54.597Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2a/2ea0767cad19e71b3530e4cad9605d0b5e338b6a1e72c37c9c1ceb86c333/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:80406c3065e2c55d7f49a9550fe0c49b3f12e5bfff5dedb727e319e1afb9bf99", size = 4270942, upload-time = "2026-04-08T01:56:56.416Z" }, + { url = "https://files.pythonhosted.org/packages/41/3d/fe14df95a83319af25717677e956567a105bb6ab25641acaa093db79975d/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:c5b1ccd1239f48b7151a65bc6dd54bcfcc15e028c8ac126d3fada09db0e07ef1", size = 4871079, upload-time = "2026-04-08T01:56:58.31Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/4a479e0f36f8f378d397f4eab4c850b4ffb79a2f0d58704b8fa0703ddc11/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:d5f7520159cd9c2154eb61eb67548ca05c5774d39e9c2c4339fd793fe7d097b2", size = 4443999, upload-time = "2026-04-08T01:57:00.508Z" }, + { url = "https://files.pythonhosted.org/packages/28/17/b59a741645822ec6d04732b43c5d35e4ef58be7bfa84a81e5ae6f05a1d33/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fcd8eac50d9138c1d7fc53a653ba60a2bee81a505f9f8850b6b2888555a45d0e", size = 4399191, upload-time = 
"2026-04-08T01:57:02.654Z" }, + { url = "https://files.pythonhosted.org/packages/59/6a/bb2e166d6d0e0955f1e9ff70f10ec4b2824c9cfcdb4da772c7dd69cc7d80/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:65814c60f8cc400c63131584e3e1fad01235edba2614b61fbfbfa954082db0ee", size = 4655782, upload-time = "2026-04-08T01:57:04.592Z" }, + { url = "https://files.pythonhosted.org/packages/95/b6/3da51d48415bcb63b00dc17c2eff3a651b7c4fed484308d0f19b30e8cb2c/cryptography-46.0.7-cp314-cp314t-win32.whl", hash = "sha256:fdd1736fed309b4300346f88f74cd120c27c56852c3838cab416e7a166f67298", size = 3002227, upload-time = "2026-04-08T01:57:06.91Z" }, + { url = "https://files.pythonhosted.org/packages/32/a8/9f0e4ed57ec9cebe506e58db11ae472972ecb0c659e4d52bbaee80ca340a/cryptography-46.0.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e06acf3c99be55aa3b516397fe42f5855597f430add9c17fa46bf2e0fb34c9bb", size = 3475332, upload-time = "2026-04-08T01:57:08.807Z" }, + { url = "https://files.pythonhosted.org/packages/a7/7f/cd42fc3614386bc0c12f0cb3c4ae1fc2bbca5c9662dfed031514911d513d/cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4", size = 7165618, upload-time = "2026-04-08T01:57:10.645Z" }, + { url = "https://files.pythonhosted.org/packages/a5/d0/36a49f0262d2319139d2829f773f1b97ef8aef7f97e6e5bd21455e5a8fb5/cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7", size = 4270628, upload-time = "2026-04-08T01:57:12.885Z" }, + { url = "https://files.pythonhosted.org/packages/8a/6c/1a42450f464dda6ffbe578a911f773e54dd48c10f9895a23a7e88b3e7db5/cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832", size = 4415405, upload-time = "2026-04-08T01:57:14.923Z" }, + { url = "https://files.pythonhosted.org/packages/9a/92/4ed714dbe93a066dc1f4b4581a464d2d7dbec9046f7c8b7016f5286329e2/cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163", size = 4272715, upload-time = "2026-04-08T01:57:16.638Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e6/a26b84096eddd51494bba19111f8fffe976f6a09f132706f8f1bf03f51f7/cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2", size = 4918400, upload-time = "2026-04-08T01:57:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/c7/08/ffd537b605568a148543ac3c2b239708ae0bd635064bab41359252ef88ed/cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067", size = 4450634, upload-time = "2026-04-08T01:57:21.185Z" }, + { url = "https://files.pythonhosted.org/packages/16/01/0cd51dd86ab5b9befe0d031e276510491976c3a80e9f6e31810cce46c4ad/cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0", size = 3985233, upload-time = "2026-04-08T01:57:22.862Z" }, + { url = "https://files.pythonhosted.org/packages/92/49/819d6ed3a7d9349c2939f81b500a738cb733ab62fbecdbc1e38e83d45e12/cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba", size = 4271955, upload-time = "2026-04-08T01:57:24.814Z" }, 
+ { url = "https://files.pythonhosted.org/packages/80/07/ad9b3c56ebb95ed2473d46df0847357e01583f4c52a85754d1a55e29e4d0/cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006", size = 4879888, upload-time = "2026-04-08T01:57:26.88Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c7/201d3d58f30c4c2bdbe9b03844c291feb77c20511cc3586daf7edc12a47b/cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0", size = 4449961, upload-time = "2026-04-08T01:57:29.068Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ef/649750cbf96f3033c3c976e112265c33906f8e462291a33d77f90356548c/cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85", size = 4401696, upload-time = "2026-04-08T01:57:31.029Z" }, + { url = "https://files.pythonhosted.org/packages/41/52/a8908dcb1a389a459a29008c29966c1d552588d4ae6d43f3a1a4512e0ebe/cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e", size = 4664256, upload-time = "2026-04-08T01:57:33.144Z" }, + { url = "https://files.pythonhosted.org/packages/4b/fa/f0ab06238e899cc3fb332623f337a7364f36f4bb3f2534c2bb95a35b132c/cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246", size = 3013001, upload-time = "2026-04-08T01:57:34.933Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f1/00ce3bde3ca542d1acd8f8cfa38e446840945aa6363f9b74746394b14127/cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3", size = 3472985, upload-time = "2026-04-08T01:57:36.714Z" }, ] [[package]] name = "ddgs" -version = "9.10.0" +version = "9.14.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, - { name = "fake-useragent" }, - { name = "httpx", extra = ["brotli", "http2", "socks"] }, { name = "lxml" }, { name = "primp" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/07/76/8dc0323d1577037abad7a679f8af150ebb73a94995d3012de71a8898e6e6/ddgs-9.10.0.tar.gz", hash = "sha256:d9381ff75bdf1ad6691d3d1dc2be12be190d1d32ecd24f1002c492143c52c34f", size = 31491, upload-time = "2025-12-17T23:30:15.021Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/f2/aa1f5af106ea0ef0351d11a2fe05d28618463160137326eeb3073b7d788b/ddgs-9.14.1.tar.gz", hash = "sha256:85b878225a622ba145aff33c0f2f0dceb90d6cfaa291af253021d10cb261a8bb", size = 57157, upload-time = "2026-04-20T12:09:21.313Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/0e/d4b7d6a8df5074cf67bc14adead39955b0bf847c947ff6cad0bb527887f4/ddgs-9.10.0-py3-none-any.whl", hash = "sha256:81233d79309836eb03e7df2a0d2697adc83c47c342713132c0ba618f1f2c6eee", size = 40311, upload-time = "2025-12-17T23:30:13.606Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a0/c0b568acd6ec819ec94ecfd4eebd00edc855efab06e589ad17d0412ff4ce/ddgs-9.14.1-py3-none-any.whl", hash = "sha256:e6b853be092532add9c0d611c4b121f0b27092de66756401057c2100f6b1ab44", size = 67019, upload-time = "2026-04-20T12:09:19.867Z" }, ] [[package]] @@ -670,12 +744,16 @@ name = "deer-flow" version = "0.1.0" source = { virtual = "." 
} dependencies = [ + { name = "bcrypt" }, { name = "deerflow-harness" }, + { name = "dingtalk-stream" }, + { name = "email-validator" }, { name = "fastapi" }, { name = "httpx" }, { name = "langgraph-sdk" }, { name = "lark-oapi" }, { name = "markdown-to-mrkdwn" }, + { name = "pyjwt" }, { name = "python-multipart" }, { name = "python-telegram-bot" }, { name = "slack-sdk" }, @@ -684,20 +762,32 @@ dependencies = [ { name = "wecom-aibot-python-sdk" }, ] +[package.optional-dependencies] +postgres = [ + { name = "deerflow-harness", extra = ["postgres"] }, +] + [package.dev-dependencies] dev = [ + { name = "prompt-toolkit" }, { name = "pytest" }, + { name = "pytest-asyncio" }, { name = "ruff" }, ] [package.metadata] requires-dist = [ + { name = "bcrypt", specifier = ">=4.0.0" }, { name = "deerflow-harness", editable = "packages/harness" }, + { name = "deerflow-harness", extras = ["postgres"], marker = "extra == 'postgres'", editable = "packages/harness" }, + { name = "dingtalk-stream", specifier = ">=0.24.3" }, + { name = "email-validator", specifier = ">=2.0.0" }, { name = "fastapi", specifier = ">=0.115.0" }, { name = "httpx", specifier = ">=0.28.0" }, { name = "langgraph-sdk", specifier = ">=0.1.51" }, { name = "lark-oapi", specifier = ">=1.4.0" }, { name = "markdown-to-mrkdwn", specifier = ">=0.3.1" }, + { name = "pyjwt", specifier = ">=2.9.0" }, { name = "python-multipart", specifier = ">=0.0.26" }, { name = "python-telegram-bot", specifier = ">=21.0" }, { name = "slack-sdk", specifier = ">=3.33.0" }, @@ -705,10 +795,13 @@ requires-dist = [ { name = "uvicorn", extras = ["standard"], specifier = ">=0.34.0" }, { name = "wecom-aibot-python-sdk", specifier = ">=0.1.6" }, ] +provides-extras = ["postgres"] [package.metadata.requires-dev] dev = [ + { name = "prompt-toolkit", specifier = ">=3.0.0" }, { name = "pytest", specifier = ">=9.0.3" }, + { name = "pytest-asyncio", specifier = ">=1.3.0" }, { name = "ruff", specifier = ">=0.14.11" }, ] @@ -719,6 +812,8 @@ source = { editable = "packages/harness" } dependencies = [ { name = "agent-client-protocol" }, { name = "agent-sandbox" }, + { name = "aiosqlite" }, + { name = "alembic" }, { name = "ddgs" }, { name = "dotenv" }, { name = "duckdb" }, @@ -744,6 +839,7 @@ dependencies = [ { name = "pydantic" }, { name = "pyyaml" }, { name = "readabilipy" }, + { name = "sqlalchemy", extra = ["asyncio"] }, { name = "tavily-python" }, { name = "tiktoken" }, ] @@ -752,6 +848,12 @@ dependencies = [ ollama = [ { name = "langchain-ollama" }, ] +postgres = [ + { name = "asyncpg" }, + { name = "langgraph-checkpoint-postgres" }, + { name = "psycopg", extra = ["binary"] }, + { name = "psycopg-pool" }, +] pymupdf = [ { name = "pymupdf4llm" }, ] @@ -760,6 +862,9 @@ pymupdf = [ requires-dist = [ { name = "agent-client-protocol", specifier = ">=0.4.0" }, { name = "agent-sandbox", specifier = ">=0.0.19" }, + { name = "aiosqlite", specifier = ">=0.19" }, + { name = "alembic", specifier = ">=1.13" }, + { name = "asyncpg", marker = "extra == 'postgres'", specifier = ">=0.29" }, { name = "ddgs", specifier = ">=9.10.0" }, { name = "dotenv", specifier = ">=0.9.9" }, { name = "duckdb", specifier = ">=1.4.4" }, @@ -767,30 +872,34 @@ requires-dist = [ { name = "firecrawl-py", specifier = ">=1.15.0" }, { name = "httpx", specifier = ">=0.28.0" }, { name = "kubernetes", specifier = ">=30.0.0" }, - { name = "langchain", specifier = ">=1.2.3" }, - { name = "langchain-anthropic", specifier = ">=1.3.4" }, + { name = "langchain", specifier = ">=1.2.15" }, + { name = "langchain-anthropic", 
specifier = ">=1.4.1" }, { name = "langchain-deepseek", specifier = ">=1.0.1" }, { name = "langchain-google-genai", specifier = ">=4.2.1" }, - { name = "langchain-mcp-adapters", specifier = ">=0.1.0" }, + { name = "langchain-mcp-adapters", specifier = ">=0.2.2" }, { name = "langchain-ollama", marker = "extra == 'ollama'", specifier = ">=0.3.0" }, - { name = "langchain-openai", specifier = ">=1.1.7" }, + { name = "langchain-openai", specifier = ">=1.2.1" }, { name = "langfuse", specifier = ">=3.4.1" }, - { name = "langgraph", specifier = ">=1.0.6,<1.0.10" }, - { name = "langgraph-api", specifier = ">=0.7.0,<0.8.0" }, + { name = "langgraph", specifier = ">=1.1.9" }, + { name = "langgraph-api", specifier = ">=0.8.1" }, + { name = "langgraph-checkpoint-postgres", marker = "extra == 'postgres'", specifier = ">=3.0.5" }, { name = "langgraph-checkpoint-sqlite", specifier = ">=3.0.3" }, - { name = "langgraph-cli", specifier = ">=0.4.14" }, - { name = "langgraph-runtime-inmem", specifier = ">=0.22.1" }, + { name = "langgraph-cli", specifier = ">=0.4.24" }, + { name = "langgraph-runtime-inmem", specifier = ">=0.28.0" }, { name = "langgraph-sdk", specifier = ">=0.1.51" }, { name = "markdownify", specifier = ">=1.2.2" }, { name = "markitdown", extras = ["all", "xlsx"], specifier = ">=0.0.1a2" }, + { name = "psycopg", extras = ["binary"], marker = "extra == 'postgres'", specifier = ">=3.3.3" }, + { name = "psycopg-pool", marker = "extra == 'postgres'", specifier = ">=3.3.0" }, { name = "pydantic", specifier = ">=2.12.5" }, { name = "pymupdf4llm", marker = "extra == 'pymupdf'", specifier = ">=0.0.17" }, { name = "pyyaml", specifier = ">=6.0.3" }, { name = "readabilipy", specifier = ">=0.3.0" }, + { name = "sqlalchemy", extras = ["asyncio"], specifier = ">=2.0,<3.0" }, { name = "tavily-python", specifier = ">=0.7.17" }, { name = "tiktoken", specifier = ">=0.8.0" }, ] -provides-extras = ["ollama", "pymupdf"] +provides-extras = ["ollama", "postgres", "pymupdf"] [[package]] name = "defusedxml" @@ -801,6 +910,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" }, ] +[[package]] +name = "dingtalk-stream" +version = "0.24.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "requests" }, + { name = "websockets" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/44/102dede3f371277598df6aa9725b82e3add068c729333c7a5dbc12764579/dingtalk_stream-0.24.3-py3-none-any.whl", hash = "sha256:2160403656985962878bf60cdf5adf41619f21067348e06f07a7c7eebf5943ad", size = 27813, upload-time = "2025-10-24T09:36:57.497Z" }, +] + [[package]] name = "distro" version = "1.9.0" @@ -811,12 +933,21 @@ wheels = [ ] [[package]] -name = "docstring-parser" -version = "0.17.0" +name = "dnspython" +version = "2.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = 
"sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, + { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, +] + +[[package]] +name = "docstring-parser" +version = "0.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/4d/f332313098c1de1b2d2ff91cf2674415cc7cddab2ca1b01ae29774bd5fdf/docstring_parser-0.18.0.tar.gz", hash = "sha256:292510982205c12b1248696f44959db3cdd1740237a968ea1e2e7a900eeb2015", size = 29341, upload-time = "2026-04-14T04:09:19.867Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/5f/ed01f9a3cdffbd5a008556fc7b2a08ddb1cc6ace7effa7340604b1d16699/docstring_parser-0.18.0-py3-none-any.whl", hash = "sha256:b3fcbed555c47d8479be0796ef7e19c2670d428d72e96da63f3a40122860374b", size = 22484, upload-time = "2026-04-14T04:09:18.638Z" }, ] [[package]] @@ -832,31 +963,31 @@ wheels = [ [[package]] name = "duckdb" -version = "1.4.4" +version = "1.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/36/9d/ab66a06e416d71b7bdcb9904cdf8d4db3379ef632bb8e9495646702d9718/duckdb-1.4.4.tar.gz", hash = "sha256:8bba52fd2acb67668a4615ee17ee51814124223de836d9e2fdcbc4c9021b3d3c", size = 18419763, upload-time = "2026-01-26T11:50:37.68Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/66/744b4931b799a42f8cb9bc7a6f169e7b8e51195b62b246db407fd90bf15f/duckdb-1.5.2.tar.gz", hash = "sha256:638da0d5102b6cb6f7d47f83d0600708ac1d3cb46c5e9aaabc845f9ba4d69246", size = 18017166, upload-time = "2026-04-13T11:30:09.065Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/58/33/beadaa69f8458afe466126f2c5ee48c4759cc9d5d784f8703d44e0b52c3c/duckdb-1.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ddcfd9c6ff234da603a1edd5fd8ae6107f4d042f74951b65f91bc5e2643856b3", size = 28896535, upload-time = "2026-01-26T11:49:21.232Z" }, - { url = "https://files.pythonhosted.org/packages/76/66/82413f386df10467affc87f65bac095b7c88dbd9c767584164d5f4dc4cb8/duckdb-1.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6792ca647216bd5c4ff16396e4591cfa9b4a72e5ad7cdd312cec6d67e8431a7c", size = 15349716, upload-time = "2026-01-26T11:49:23.989Z" }, - { url = "https://files.pythonhosted.org/packages/5d/8c/c13d396fd4e9bf970916dc5b4fea410c1b10fe531069aea65f1dcf849a71/duckdb-1.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1f8d55843cc940e36261689054f7dfb6ce35b1f5b0953b0d355b6adb654b0d52", size = 13672403, upload-time = "2026-01-26T11:49:26.741Z" }, - { url = "https://files.pythonhosted.org/packages/db/77/2446a0b44226bb95217748d911c7ca66a66ca10f6481d5178d9370819631/duckdb-1.4.4-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c65d15c440c31e06baaebfd2c06d71ce877e132779d309f1edf0a85d23c07e92", size = 18419001, upload-time = "2026-01-26T11:49:29.353Z" }, - { url = 
"https://files.pythonhosted.org/packages/2e/a3/97715bba30040572fb15d02c26f36be988d48bc00501e7ac02b1d65ef9d0/duckdb-1.4.4-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b297eff642503fd435a9de5a9cb7db4eccb6f61d61a55b30d2636023f149855f", size = 20437385, upload-time = "2026-01-26T11:49:32.302Z" }, - { url = "https://files.pythonhosted.org/packages/8b/0a/18b9167adf528cbe3867ef8a84a5f19f37bedccb606a8a9e59cfea1880c8/duckdb-1.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d525de5f282b03aa8be6db86b1abffdceae5f1055113a03d5b50cd2fb8cf2ef8", size = 12267343, upload-time = "2026-01-26T11:49:34.985Z" }, - { url = "https://files.pythonhosted.org/packages/f8/15/37af97f5717818f3d82d57414299c293b321ac83e048c0a90bb8b6a09072/duckdb-1.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:50f2eb173c573811b44aba51176da7a4e5c487113982be6a6a1c37337ec5fa57", size = 13007490, upload-time = "2026-01-26T11:49:37.413Z" }, - { url = "https://files.pythonhosted.org/packages/7f/fe/64810fee20030f2bf96ce28b527060564864ce5b934b50888eda2cbf99dd/duckdb-1.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:337f8b24e89bc2e12dadcfe87b4eb1c00fd920f68ab07bc9b70960d6523b8bc3", size = 28899349, upload-time = "2026-01-26T11:49:40.294Z" }, - { url = "https://files.pythonhosted.org/packages/9c/9b/3c7c5e48456b69365d952ac201666053de2700f5b0144a699a4dc6854507/duckdb-1.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0509b39ea7af8cff0198a99d206dca753c62844adab54e545984c2e2c1381616", size = 15350691, upload-time = "2026-01-26T11:49:43.242Z" }, - { url = "https://files.pythonhosted.org/packages/a6/7b/64e68a7b857ed0340045501535a0da99ea5d9d5ea3708fec0afb8663eb27/duckdb-1.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fb94de6d023de9d79b7edc1ae07ee1d0b4f5fa8a9dcec799650b5befdf7aafec", size = 13672311, upload-time = "2026-01-26T11:49:46.069Z" }, - { url = "https://files.pythonhosted.org/packages/09/5b/3e7aa490841784d223de61beb2ae64e82331501bf5a415dc87a0e27b4663/duckdb-1.4.4-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0d636ceda422e7babd5e2f7275f6a0d1a3405e6a01873f00d38b72118d30c10b", size = 18422740, upload-time = "2026-01-26T11:49:49.034Z" }, - { url = "https://files.pythonhosted.org/packages/53/32/256df3dbaa198c58539ad94f9a41e98c2c8ff23f126b8f5f52c7dcd0a738/duckdb-1.4.4-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7df7351328ffb812a4a289732f500d621e7de9942a3a2c9b6d4afcf4c0e72526", size = 20435578, upload-time = "2026-01-26T11:49:51.946Z" }, - { url = "https://files.pythonhosted.org/packages/a4/f0/620323fd87062ea43e527a2d5ed9e55b525e0847c17d3b307094ddab98a2/duckdb-1.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:6fb1225a9ea5877421481d59a6c556a9532c32c16c7ae6ca8d127e2b878c9389", size = 12268083, upload-time = "2026-01-26T11:49:54.615Z" }, - { url = "https://files.pythonhosted.org/packages/e5/07/a397fdb7c95388ba9c055b9a3d38dfee92093f4427bc6946cf9543b1d216/duckdb-1.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:f28a18cc790217e5b347bb91b2cab27aafc557c58d3d8382e04b4fe55d0c3f66", size = 13006123, upload-time = "2026-01-26T11:49:57.092Z" }, - { url = "https://files.pythonhosted.org/packages/97/a6/f19e2864e651b0bd8e4db2b0c455e7e0d71e0d4cd2cd9cc052f518e43eb3/duckdb-1.4.4-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:25874f8b1355e96178079e37312c3ba6d61a2354f51319dae860cf21335c3a20", size = 28909554, upload-time = "2026-01-26T11:50:00.107Z" }, - { url = 
"https://files.pythonhosted.org/packages/0e/93/8a24e932c67414fd2c45bed83218e62b73348996bf859eda020c224774b2/duckdb-1.4.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:452c5b5d6c349dc5d1154eb2062ee547296fcbd0c20e9df1ed00b5e1809089da", size = 15353804, upload-time = "2026-01-26T11:50:03.382Z" }, - { url = "https://files.pythonhosted.org/packages/62/13/e5378ff5bb1d4397655d840b34b642b1b23cdd82ae19599e62dc4b9461c9/duckdb-1.4.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8e5c2d8a0452df55e092959c0bfc8ab8897ac3ea0f754cb3b0ab3e165cd79aff", size = 13676157, upload-time = "2026-01-26T11:50:06.232Z" }, - { url = "https://files.pythonhosted.org/packages/2d/94/24364da564b27aeebe44481f15bd0197a0b535ec93f188a6b1b98c22f082/duckdb-1.4.4-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1af6e76fe8bd24875dc56dd8e38300d64dc708cd2e772f67b9fbc635cc3066a3", size = 18426882, upload-time = "2026-01-26T11:50:08.97Z" }, - { url = "https://files.pythonhosted.org/packages/26/0a/6ae31b2914b4dc34243279b2301554bcbc5f1a09ccc82600486c49ab71d1/duckdb-1.4.4-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0440f59e0cd9936a9ebfcf7a13312eda480c79214ffed3878d75947fc3b7d6d", size = 20435641, upload-time = "2026-01-26T11:50:12.188Z" }, - { url = "https://files.pythonhosted.org/packages/d2/b1/fd5c37c53d45efe979f67e9bd49aaceef640147bb18f0699a19edd1874d6/duckdb-1.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:59c8d76016dde854beab844935b1ec31de358d4053e792988108e995b18c08e7", size = 12762360, upload-time = "2026-01-26T11:50:14.76Z" }, - { url = "https://files.pythonhosted.org/packages/dd/2d/13e6024e613679d8a489dd922f199ef4b1d08a456a58eadd96dc2f05171f/duckdb-1.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:53cd6423136ab44383ec9955aefe7599b3fb3dd1fe006161e6396d8167e0e0d4", size = 13458633, upload-time = "2026-01-26T11:50:17.657Z" }, + { url = "https://files.pythonhosted.org/packages/41/de/ebe66bbe78125fc610f4fd415447a65349d94245950f3b3dfb31d028af02/duckdb-1.5.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e6495b00cad16888384119842797c49316a96ae1cb132bb03856d980d95afee1", size = 30064950, upload-time = "2026-04-13T11:29:11.468Z" }, + { url = "https://files.pythonhosted.org/packages/2d/8a/3e25b5d03bcf1fb99d189912f8ce92b1db4f9c8778e1b1f55745973a855a/duckdb-1.5.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d72b8856b1839d35648f38301b058f6232f4d36b463fe4dc8f4d3fdff2df1a2e", size = 15969113, upload-time = "2026-04-13T11:29:14.139Z" }, + { url = "https://files.pythonhosted.org/packages/19/bb/58001f0815002b1a93431bf907f77854085c7d049b83d521814a07b9db0b/duckdb-1.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2a1de4f4d454b8c97aec546c82003fc834d3422ce4bc6a19902f3462ef293bed", size = 14224774, upload-time = "2026-04-13T11:29:16.758Z" }, + { url = "https://files.pythonhosted.org/packages/d3/2f/a7f0de9509d1cef35608aeb382919041cdd70f58c173865c3da6a0d87979/duckdb-1.5.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce0b8141a10d37ecef729c45bc41d334854013f4389f1488bd6035c5579aaac1", size = 19313510, upload-time = "2026-04-13T11:29:19.574Z" }, + { url = "https://files.pythonhosted.org/packages/26/78/eb1e064ea8b9df3b87b167bfd7a407b2f615a4291e06cba756727adfa06c/duckdb-1.5.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99ef73a277c8921bc0a1f16dee38d924484251d9cfd20951748c20fcd5ed855", size = 21429692, upload-time = "2026-04-13T11:29:22.575Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/12/05b0c47d14839925c5e35b79081d918ca82e3f236bb724a6f58409dd5291/duckdb-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:8d599758b4e48bf12e18c9b960cf491d219f0c4972d19a45489c05cc5ab36f83", size = 13107594, upload-time = "2026-04-13T11:29:25.43Z" }, + { url = "https://files.pythonhosted.org/packages/0b/2c/80558a82b236e044330e84a154b96aacddb343316b479f3d49be03ea11cb/duckdb-1.5.2-cp312-cp312-win_arm64.whl", hash = "sha256:fc85a5dbcbe6eccac1113c72370d1d3aacfdd49198d63950bdf7d8638a307f00", size = 13927537, upload-time = "2026-04-13T11:29:27.842Z" }, + { url = "https://files.pythonhosted.org/packages/98/f2/e3d742808f138d374be4bb516fade3d1f33749b813650810ab7885cdc363/duckdb-1.5.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:4420b3f47027a7849d0e1815532007f377fa95ee5810b47ea717d35525c12f79", size = 30064879, upload-time = "2026-04-13T11:29:30.763Z" }, + { url = "https://files.pythonhosted.org/packages/72/0d/f3dc1cf97e1267ca15e4307d456f96ce583961f0703fd75e62b2ad8d64fa/duckdb-1.5.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bb42e6ed543902e14eae647850da24103a89f0bc2587dec5601b1c1f213bd2ed", size = 15969327, upload-time = "2026-04-13T11:29:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e0/d5418def53ae4e05a63075705ff44ed5af5a1a5932627eb2b600c5df1c93/duckdb-1.5.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:98c0535cd6d901f61a5ea3c2e26a1fd28482953d794deb183daf568e3aa5dda6", size = 14225107, upload-time = "2026-04-13T11:29:35.882Z" }, + { url = "https://files.pythonhosted.org/packages/16/a7/15aaa59dbecc35e9711980fcdbf525b32a52470b32d18ef678193a146213/duckdb-1.5.2-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:486c862bf7f163c0110b6d85b3e5c031d224a671cca468f12ebb1d3a348f6b39", size = 19313433, upload-time = "2026-04-13T11:29:38.367Z" }, + { url = "https://files.pythonhosted.org/packages/bd/21/d903cc63a5140c822b7b62b373a87dc557e60c29b321dfb435061c5e67cf/duckdb-1.5.2-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70631c847ca918ee710ec874241b00cf9d2e5be90762cbb2a0389f17823c08f7", size = 21429837, upload-time = "2026-04-13T11:29:41.135Z" }, + { url = "https://files.pythonhosted.org/packages/e3/0a/b770d1f60c70597302130d6247f418549b7094251a02348fbaf1c7e147ae/duckdb-1.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:52a21823f3fbb52f0f0e5425e20b07391ad882464b955879499b5ff0b45a376b", size = 13107699, upload-time = "2026-04-13T11:29:43.905Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cf/e200fe431d700962d1a908d2ce89f53ccee1cc8db260174ae663ba09686b/duckdb-1.5.2-cp313-cp313-win_arm64.whl", hash = "sha256:411ad438bd4140f189a10e7f515781335962c5d18bd07837dc6d202e3985253d", size = 13927646, upload-time = "2026-04-13T11:29:46.598Z" }, + { url = "https://files.pythonhosted.org/packages/83/a1/f6286c67726cc1ea60a6e3c0d9fbc66527dde24ae089a51bbe298b13ca78/duckdb-1.5.2-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:6b0fe75c148000f060aa1a27b293cacc0ea08cc1cad724fbf2143d56070a3785", size = 30078598, upload-time = "2026-04-13T11:29:49.828Z" }, + { url = "https://files.pythonhosted.org/packages/de/6a/59febb02f21a4a5c6b0b0099ef7c965fdd5e61e4904cf813809bb792e35f/duckdb-1.5.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:35579b8e3a064b5eaf15b0eafc558056a13f79a0a62e34cc4baf57119daecfec", size = 15975120, upload-time = "2026-04-13T11:29:52.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/70/ce750854d37bb5a45cccbb2c3cb04df4af56aea8fc30a2499bb643b4a9c0/duckdb-1.5.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ea58ff5b0880593a280cf5511734b17711b32ee1f58b47d726e8600848358160", size = 14227762, upload-time = "2026-04-13T11:29:55.564Z" }, + { url = "https://files.pythonhosted.org/packages/28/dc/ad45ac3c0b6c4687dc649e8f6cf01af1c8b0443932a39b2abb4ebcb3babd/duckdb-1.5.2-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef461bca07313412dc09961c4a4757a851f56b95ac01c58fac6007632b7b94f2", size = 19315668, upload-time = "2026-04-13T11:29:58.427Z" }, + { url = "https://files.pythonhosted.org/packages/cc/b1/1464f468d2e5813f5808de95df9d3113a645a5bfa2ffcaecbc542ddae272/duckdb-1.5.2-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be37680ddb380015cb37318e378c53511c45c4f0d8fac5599d22b7d092b9217a", size = 21434056, upload-time = "2026-04-13T11:30:01.238Z" }, + { url = "https://files.pythonhosted.org/packages/ce/32/6673607e024722473fa7aafdd29c0e3dd231dd528f6cd8b5797fbeeb229d/duckdb-1.5.2-cp314-cp314-win_amd64.whl", hash = "sha256:0b291786014df1133f8f18b9df4d004484613146e858d71a21791e0fcca16cf4", size = 13633667, upload-time = "2026-04-13T11:30:04.05Z" }, + { url = "https://files.pythonhosted.org/packages/7a/e3/9d34173ec068631faea3ea6e73050700729363e7e33306a9a3218e5cdc61/duckdb-1.5.2-cp314-cp314-win_arm64.whl", hash = "sha256:c9f3e0b71b8a50fccfb42794899285d9d318ce2503782b9dd54868e5ecd0ad31", size = 14402513, upload-time = "2026-04-13T11:30:06.609Z" }, ] [[package]] @@ -868,6 +999,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922, upload-time = "2025-05-17T13:52:36.463Z" }, ] +[[package]] +name = "email-validator" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238, upload-time = "2025-08-26T13:09:06.831Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" }, +] + [[package]] name = "et-xmlfile" version = "2.0.0" @@ -879,7 +1023,7 @@ wheels = [ [[package]] name = "exa-py" -version = "2.10.1" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpcore" }, @@ -890,33 +1034,25 @@ dependencies = [ { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/bb/23c9f78edbf0e0d656839be7346a2f77b9caaae8cc3cb301012c46fd7dc5/exa_py-2.10.1.tar.gz", hash = "sha256:731958c2befc5fc82f031c93cfe7b3d55dc3b0e1bf32f83ec34d32a65ee31ba1", size = 53826, upload-time = "2026-03-25T00:50:49.286Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/e4/11bbbc076ae420b9e00537945d48a03cb42cc6da63edc65bf50d23e4778e/exa_py-2.12.1.tar.gz", hash = 
"sha256:9ff1924fbfbcae822b20c0ddef0650fabc04ac75906b9153623eadc18135b7ce", size = 55792, upload-time = "2026-04-22T20:00:38.528Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/8d/0665263aa8d51ef8e2a3955e2b56496add4879730451961b09610bbc7036/exa_py-2.10.1-py3-none-any.whl", hash = "sha256:e2174c932764fff747e84e9e6d0637eaa4a6503556014df73a3427f42cc9d6a7", size = 72270, upload-time = "2026-03-25T00:50:47.721Z" }, -] - -[[package]] -name = "fake-useragent" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/43/948d10bf42735709edb5ae51e23297d034086f17fc7279fef385a7acb473/fake_useragent-2.2.0.tar.gz", hash = "sha256:4e6ab6571e40cc086d788523cf9e018f618d07f9050f822ff409a4dfe17c16b2", size = 158898, upload-time = "2025-04-14T15:32:19.238Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/37/b3ea9cd5558ff4cb51957caca2193981c6b0ff30bd0d2630ac62505d99d0/fake_useragent-2.2.0-py3-none-any.whl", hash = "sha256:67f35ca4d847b0d298187443aaf020413746e56acd985a611908c73dba2daa24", size = 161695, upload-time = "2025-04-14T15:32:17.732Z" }, + { url = "https://files.pythonhosted.org/packages/49/19/0a504b6ce7c468595cd0551f65e5c464832a1d3af8dc8acd681e21696a5f/exa_py-2.12.1-py3-none-any.whl", hash = "sha256:9e735802161482a7d5b231376257883cb4e34dbd6f75ded04ab1a5a171b69d9f", size = 74512, upload-time = "2026-04-22T20:00:34.326Z" }, ] [[package]] name = "fastapi" -version = "0.128.0" +version = "0.136.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-doc" }, { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, + { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682, upload-time = "2025-12-27T15:21:13.714Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5d/45/c130091c2dfa061bbfe3150f2a5091ef1adf149f2a8d2ae769ecaf6e99a2/fastapi-0.136.1.tar.gz", hash = "sha256:7af665ad7acfa0a3baf8983d393b6b471b9da10ede59c60045f49fbc89a0fa7f", size = 397448, upload-time = "2026-04-23T16:49:44.046Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094, upload-time = "2025-12-27T15:21:12.154Z" }, + { url = "https://files.pythonhosted.org/packages/5a/ff/2e4eca3ade2c22fe1dea7043b8ee9dabe47753349eb1b56a202de8af6349/fastapi-0.136.1-py3-none-any.whl", hash = "sha256:a6e9d7eeada96c93a4d69cb03836b44fa34e2854accb7244a1ece36cd4781c3f", size = 117683, upload-time = "2026-04-23T16:49:42.437Z" }, ] [[package]] @@ -930,7 +1066,7 @@ wheels = [ [[package]] name = "firecrawl-py" -version = "4.13.4" +version = "4.23.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, @@ -941,9 +1077,9 @@ dependencies = [ { name = "requests" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0e/ea/b3fc460adf3b0bea4e988b25b44f10bc32542734d3509738bd627032f18a/firecrawl_py-4.13.4.tar.gz", hash = "sha256:2e44f3a0631690bd9589dc87544ce7f22a6159f0dbbfb9ed9e5eb8642f24ef4f", size = 164280, upload-time = "2026-01-23T01:27:30.287Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/11/a3/5088759334803f2efa1eaa0267d93804a71d934f3185ee125aee7f72f084/firecrawl_py-4.23.0.tar.gz", hash = "sha256:7c65a74e0d328a3cf4af1cd476af2ef34090326225fab65d3fe05a2d32d2b11b", size = 179393, upload-time = "2026-04-22T21:37:54.232Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/f3/c3595e568d0e98ddbdbe4913928a4de95fb416fa1d029ab6d4dc0e8b0dba/firecrawl_py-4.13.4-py3-none-any.whl", hash = "sha256:f529c64ce9f81a42ca55e372153937b044aa29288f31908da54a7fdfc68e782b", size = 206309, upload-time = "2026-01-23T01:27:28.556Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/9ceb86a012dd15c4a1eb176da239b3772bf34ced598d5ca176e2c53acfc0/firecrawl_py-4.23.0-py3-none-any.whl", hash = "sha256:1029f837d1485edf1006485ab3dd94a6a6f5225e4ffef1df2d3e9cdc5c4bd296", size = 224952, upload-time = "2026-04-22T21:37:52.082Z" }, ] [[package]] @@ -1051,16 +1187,15 @@ wheels = [ [[package]] name = "google-auth" -version = "2.48.0" +version = "2.49.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, { name = "pyasn1-modules" }, - { name = "rsa" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0c/41/242044323fbd746615884b1c16639749e73665b718209946ebad7ba8a813/google_auth-2.48.0.tar.gz", hash = "sha256:4f7e706b0cd3208a3d940a19a822c37a476ddba5450156c3e6624a71f7c841ce", size = 326522, upload-time = "2026-01-26T19:22:47.157Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/fc/e925290a1ad95c975c459e2df070fac2b90954e13a0370ac505dff78cb99/google_auth-2.49.2.tar.gz", hash = "sha256:c1ae38500e73065dcae57355adb6278cf8b5c8e391994ae9cbadbcb9631ab409", size = 333958, upload-time = "2026-04-10T00:41:21.888Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/1d/d6466de3a5249d35e832a52834115ca9d1d0de6abc22065f049707516d47/google_auth-2.48.0-py3-none-any.whl", hash = "sha256:2e2a537873d449434252a9632c28bfc268b0adb1e53f9fb62afc5333a975903f", size = 236499, upload-time = "2026-01-26T19:22:45.099Z" }, + { url = "https://files.pythonhosted.org/packages/73/76/d241a5c927433420507215df6cac1b1fa4ac0ba7a794df42a84326c68da8/google_auth-2.49.2-py3-none-any.whl", hash = "sha256:c2720924dfc82dedb962c9f52cabb2ab16714fd0a6a707e40561d217574ed6d5", size = 240638, upload-time = "2026-04-10T00:41:14.501Z" }, ] [package.optional-dependencies] @@ -1070,7 +1205,7 @@ requests = [ [[package]] name = "google-genai" -version = "1.65.0" +version = "1.73.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1084,21 +1219,68 @@ dependencies = [ { name = "typing-extensions" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/79/f9/cc1191c2540d6a4e24609a586c4ed45d2db57cfef47931c139ee70e5874a/google_genai-1.65.0.tar.gz", hash = "sha256:d470eb600af802d58a79c7f13342d9ea0d05d965007cae8f76c7adff3d7a4750", size = 497206, upload-time = "2026-02-26T00:20:33.824Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/d8/40f5f107e5a2976bbac52d421f04d14fc221b55a8f05e66be44b2f739fe6/google_genai-1.73.1.tar.gz", hash = "sha256:b637e3a3b9e2eccc46f27136d470165803de84eca52abfed2e7352081a4d5a15", size = 530998, upload-time = "2026-04-14T21:06:19.153Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/3c/3fea4e7c91357c71782d7dcaad7a2577d636c90317e003386893c25bc62c/google_genai-1.65.0-py3-none-any.whl", hash = "sha256:68c025205856919bc03edb0155c11b4b833810b7ce17ad4b7a9eeba5158f6c44", size = 724429, upload-time = 
"2026-02-26T00:20:32.186Z" }, + { url = "https://files.pythonhosted.org/packages/65/af/508e0528015240d710c6763f7c89ff44fab9a94a80b4377e265d692cbfd6/google_genai-1.73.1-py3-none-any.whl", hash = "sha256:af2d2287d25e42a187de19811ef33beb2e347c7e2bdb4dc8c467d78254e43a2c", size = 783595, upload-time = "2026-04-14T21:06:17.464Z" }, ] [[package]] name = "googleapis-common-protos" -version = "1.72.0" +version = "1.74.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e5/7b/adfd75544c415c487b33061fe7ae526165241c1ea133f9a9125a56b39fd8/googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5", size = 147433, upload-time = "2025-11-06T18:29:24.087Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/18/a746c8344152d368a5aac738d4c857012f2c5d1fd2eac7e17b647a7861bd/googleapis_common_protos-1.74.0.tar.gz", hash = "sha256:57971e4eeeba6aad1163c1f0fc88543f965bb49129b8bb55b2b7b26ecab084f1", size = 151254, upload-time = "2026-04-02T21:23:26.679Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515, upload-time = "2025-11-06T18:29:13.14Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b0/be5d3329badb9230b765de6eea66b73abd5944bdeb5afb3562ddcd80ae84/googleapis_common_protos-1.74.0-py3-none-any.whl", hash = "sha256:702216f78610bb510e3f12ac3cafd281b7ac45cc5d86e90ad87e4d301a3426b5", size = 300743, upload-time = "2026-04-02T21:22:49.108Z" }, +] + +[[package]] +name = "greenlet" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/86/94/a5935717b307d7c71fe877b52b884c6af707d2d2090db118a03fbd799369/greenlet-3.4.0.tar.gz", hash = "sha256:f50a96b64dafd6169e595a5c56c9146ef80333e67d4476a65a9c55f400fc22ff", size = 195913, upload-time = "2026-04-08T17:08:00.863Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/65/8b/3669ad3b3f247a791b2b4aceb3aa5a31f5f6817bf547e4e1ff712338145a/greenlet-3.4.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:1a54a921561dd9518d31d2d3db4d7f80e589083063ab4d3e2e950756ef809e1a", size = 286902, upload-time = "2026-04-08T15:52:12.138Z" }, + { url = "https://files.pythonhosted.org/packages/38/3e/3c0e19b82900873e2d8469b590a6c4b3dfd2b316d0591f1c26b38a4879a5/greenlet-3.4.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16dec271460a9a2b154e3b1c2fa1050ce6280878430320e85e08c166772e3f97", size = 606099, upload-time = "2026-04-08T16:24:38.408Z" }, + { url = "https://files.pythonhosted.org/packages/b5/33/99fef65e7754fc76a4ed14794074c38c9ed3394a5bd129d7f61b705f3168/greenlet-3.4.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:90036ce224ed6fe75508c1907a77e4540176dcf0744473627785dd519c6f9996", size = 618837, upload-time = "2026-04-08T16:30:58.298Z" }, + { url = "https://files.pythonhosted.org/packages/44/57/eae2cac10421feae6c0987e3dc106c6d86262b1cb379e171b017aba893a6/greenlet-3.4.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6f0def07ec9a71d72315cf26c061aceee53b306c36ed38c35caba952ea1b319d", size = 624901, upload-time = "2026-04-08T16:40:38.981Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/f7/229f3aed6948faa20e0616a0b8568da22e365ede6a54d7d369058b128afd/greenlet-3.4.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a1c4f6b453006efb8310affb2d132832e9bbb4fc01ce6df6b70d810d38f1f6dc", size = 615062, upload-time = "2026-04-08T15:56:33.766Z" }, + { url = "https://files.pythonhosted.org/packages/6a/8a/0e73c9b94f31d1cc257fe79a0eff621674141cdae7d6d00f40de378a1e42/greenlet-3.4.0-cp312-cp312-manylinux_2_39_riscv64.whl", hash = "sha256:0e1254cf0cbaa17b04320c3a78575f29f3c161ef38f59c977108f19ffddaf077", size = 423927, upload-time = "2026-04-08T16:43:05.293Z" }, + { url = "https://files.pythonhosted.org/packages/08/97/d988180011aa40135c46cd0d0cf01dd97f7162bae14139b4a3ef54889ba5/greenlet-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b2d9a138ffa0e306d0e2b72976d2fb10b97e690d40ab36a472acaab0838e2de", size = 1573511, upload-time = "2026-04-08T16:26:20.058Z" }, + { url = "https://files.pythonhosted.org/packages/d4/0f/a5a26fe152fb3d12e6a474181f6e9848283504d0afd095f353d85726374b/greenlet-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8424683caf46eb0eb6f626cb95e008e8cc30d0cb675bdfa48200925c79b38a08", size = 1640396, upload-time = "2026-04-08T15:57:30.88Z" }, + { url = "https://files.pythonhosted.org/packages/42/cf/bb2c32d9a100e36ee9f6e38fad6b1e082b8184010cb06259b49e1266ca01/greenlet-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0a53fb071531d003b075c444014ff8f8b1a9898d36bb88abd9ac7b3524648a2", size = 238892, upload-time = "2026-04-08T17:03:10.094Z" }, + { url = "https://files.pythonhosted.org/packages/b7/47/6c41314bac56e71436ce551c7fbe3cc830ed857e6aa9708dbb9c65142eb6/greenlet-3.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:f38b81880ba28f232f1f675893a39cf7b6db25b31cc0a09bb50787ecf957e85e", size = 235599, upload-time = "2026-04-08T15:52:54.3Z" }, + { url = "https://files.pythonhosted.org/packages/7a/75/7e9cd1126a1e1f0cd67b0eda02e5221b28488d352684704a78ed505bd719/greenlet-3.4.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:43748988b097f9c6f09364f260741aa73c80747f63389824435c7a50bfdfd5c1", size = 285856, upload-time = "2026-04-08T15:52:45.82Z" }, + { url = "https://files.pythonhosted.org/packages/9d/c4/3e2df392e5cb199527c4d9dbcaa75c14edcc394b45040f0189f649631e3c/greenlet-3.4.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5566e4e2cd7a880e8c27618e3eab20f3494452d12fd5129edef7b2f7aa9a36d1", size = 610208, upload-time = "2026-04-08T16:24:39.674Z" }, + { url = "https://files.pythonhosted.org/packages/da/af/750cdfda1d1bd30a6c28080245be8d0346e669a98fdbae7f4102aa95fff3/greenlet-3.4.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1054c5a3c78e2ab599d452f23f7adafef55062a783a8e241d24f3b633ba6ff82", size = 621269, upload-time = "2026-04-08T16:30:59.767Z" }, + { url = "https://files.pythonhosted.org/packages/e0/93/c8c508d68ba93232784bbc1b5474d92371f2897dfc6bc281b419f2e0d492/greenlet-3.4.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:98eedd1803353daf1cd9ef23eef23eda5a4d22f99b1f998d273a8b78b70dd47f", size = 628455, upload-time = "2026-04-08T16:40:40.698Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/0cbc693622cd54ebe25207efbb3a0eb07c2639cb8594f6e3aaaa0bb077a8/greenlet-3.4.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f82cb6cddc27dd81c96b1506f4aa7def15070c3b2a67d4e46fd19016aacce6cf", size = 617549, upload-time = "2026-04-08T15:56:34.893Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/46/cfaaa0ade435a60550fd83d07dfd5c41f873a01da17ede5c4cade0b9bab8/greenlet-3.4.0-cp313-cp313-manylinux_2_39_riscv64.whl", hash = "sha256:b7857e2202aae67bc5725e0c1f6403c20a8ff46094ece015e7d474f5f7020b55", size = 426238, upload-time = "2026-04-08T16:43:06.865Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c0/8966767de01343c1ff47e8b855dc78e7d1a8ed2b7b9c83576a57e289f81d/greenlet-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:227a46251ecba4ff46ae742bc5ce95c91d5aceb4b02f885487aff269c127a729", size = 1575310, upload-time = "2026-04-08T16:26:21.671Z" }, + { url = "https://files.pythonhosted.org/packages/b8/38/bcdc71ba05e9a5fda87f63ffc2abcd1f15693b659346df994a48c968003d/greenlet-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5b99e87be7eba788dd5b75ba1cde5639edffdec5f91fe0d734a249535ec3408c", size = 1640435, upload-time = "2026-04-08T15:57:32.572Z" }, + { url = "https://files.pythonhosted.org/packages/a1/c2/19b664b7173b9e4ef5f77e8cef9f14c20ec7fce7920dc1ccd7afd955d093/greenlet-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:849f8bc17acd6295fcb5de8e46d55cc0e52381c56eaf50a2afd258e97bc65940", size = 238760, upload-time = "2026-04-08T17:04:03.878Z" }, + { url = "https://files.pythonhosted.org/packages/9b/96/795619651d39c7fbd809a522f881aa6f0ead504cc8201c3a5b789dfaef99/greenlet-3.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:9390ad88b652b1903814eaabd629ca184db15e0eeb6fe8a390bbf8b9106ae15a", size = 235498, upload-time = "2026-04-08T17:05:00.584Z" }, + { url = "https://files.pythonhosted.org/packages/78/02/bde66806e8f169cf90b14d02c500c44cdbe02c8e224c9c67bafd1b8cadd1/greenlet-3.4.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:10a07aca6babdd18c16a3f4f8880acfffc2b88dfe431ad6aa5f5740759d7d75e", size = 286291, upload-time = "2026-04-08T17:09:34.307Z" }, + { url = "https://files.pythonhosted.org/packages/05/1f/39da1c336a87d47c58352fb8a78541ce63d63ae57c5b9dae1fe02801bbc2/greenlet-3.4.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:076e21040b3a917d3ce4ad68fb5c3c6b32f1405616c4a57aa83120979649bd3d", size = 656749, upload-time = "2026-04-08T16:24:41.721Z" }, + { url = "https://files.pythonhosted.org/packages/d3/6c/90ee29a4ee27af7aa2e2ec408799eeb69ee3fcc5abcecac6ddd07a5cd0f2/greenlet-3.4.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e82689eea4a237e530bb5cb41b180ef81fa2160e1f89422a67be7d90da67f615", size = 669084, upload-time = "2026-04-08T16:31:01.372Z" }, + { url = "https://files.pythonhosted.org/packages/d2/4a/74078d3936712cff6d3c91a930016f476ce4198d84e224fe6d81d3e02880/greenlet-3.4.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:06c2d3b89e0c62ba50bd7adf491b14f39da9e7e701647cb7b9ff4c99bee04b19", size = 673405, upload-time = "2026-04-08T16:40:42.527Z" }, + { url = "https://files.pythonhosted.org/packages/07/49/d4cad6e5381a50947bb973d2f6cf6592621451b09368b8c20d9b8af49c5b/greenlet-3.4.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4df3b0b2289ec686d3c821a5fee44259c05cfe824dd5e6e12c8e5f5df23085cf", size = 665621, upload-time = "2026-04-08T15:56:35.995Z" }, + { url = "https://files.pythonhosted.org/packages/79/3e/df8a83ab894751bc31e1106fdfaa80ca9753222f106b04de93faaa55feb7/greenlet-3.4.0-cp314-cp314-manylinux_2_39_riscv64.whl", hash = "sha256:070b8bac2ff3b4d9e0ff36a0d19e42103331d9737e8504747cd1e659f76297bd", size = 471670, upload-time = "2026-04-08T16:43:08.512Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/31/d1edd54f424761b5d47718822f506b435b6aab2f3f93b465441143ea5119/greenlet-3.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8bff29d586ea415688f4cec96a591fcc3bf762d046a796cdadc1fdb6e7f2d5bf", size = 1622259, upload-time = "2026-04-08T16:26:23.201Z" }, + { url = "https://files.pythonhosted.org/packages/b0/c6/6d3f9cdcb21c4e12a79cb332579f1c6aa1af78eb68059c5a957c7812d95e/greenlet-3.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8a569c2fb840c53c13a2b8967c63621fafbd1a0e015b9c82f408c33d626a2fda", size = 1686916, upload-time = "2026-04-08T15:57:34.282Z" }, + { url = "https://files.pythonhosted.org/packages/63/45/c1ca4a1ad975de4727e52d3ffe641ae23e1d7a8ffaa8ff7a0477e1827b92/greenlet-3.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:207ba5b97ea8b0b60eb43ffcacf26969dd83726095161d676aac03ff913ee50d", size = 239821, upload-time = "2026-04-08T17:03:48.423Z" }, + { url = "https://files.pythonhosted.org/packages/71/c4/6f621023364d7e85a4769c014c8982f98053246d142420e0328980933ceb/greenlet-3.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:f8296d4e2b92af34ebde81085a01690f26a51eb9ac09a0fcadb331eb36dbc802", size = 236932, upload-time = "2026-04-08T17:04:33.551Z" }, + { url = "https://files.pythonhosted.org/packages/d4/8f/18d72b629783f5e8d045a76f5325c1e938e659a9e4da79c7dcd10169a48d/greenlet-3.4.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d70012e51df2dbbccfaf63a40aaf9b40c8bed37c3e3a38751c926301ce538ece", size = 294681, upload-time = "2026-04-08T15:52:35.778Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ad/5fa86ec46769c4153820d58a04062285b3b9e10ba3d461ee257b68dcbf53/greenlet-3.4.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a58bec0751f43068cd40cff31bb3ca02ad6000b3a51ca81367af4eb5abc480c8", size = 658899, upload-time = "2026-04-08T16:24:43.32Z" }, + { url = "https://files.pythonhosted.org/packages/43/f0/4e8174ca0e87ae748c409f055a1ba161038c43cc0a5a6f1433a26ac2e5bf/greenlet-3.4.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05fa0803561028f4b2e3b490ee41216a842eaee11aed004cc343a996d9523aa2", size = 665284, upload-time = "2026-04-08T16:31:02.833Z" }, + { url = "https://files.pythonhosted.org/packages/ef/92/466b0d9afd44b8af623139a3599d651c7564fa4152f25f117e1ee5949ffb/greenlet-3.4.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c4cd56a9eb7a6444edbc19062f7b6fbc8f287c663b946e3171d899693b1c19fa", size = 665872, upload-time = "2026-04-08T16:40:43.912Z" }, + { url = "https://files.pythonhosted.org/packages/19/da/991cf7cd33662e2df92a1274b7eb4d61769294d38a1bba8a45f31364845e/greenlet-3.4.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e60d38719cb80b3ab5e85f9f1aed4960acfde09868af6762ccb27b260d68f4ed", size = 661861, upload-time = "2026-04-08T15:56:37.269Z" }, + { url = "https://files.pythonhosted.org/packages/0d/14/3395a7ef3e260de0325152ddfe19dffb3e49fe10873b94654352b53ad48e/greenlet-3.4.0-cp314-cp314t-manylinux_2_39_riscv64.whl", hash = "sha256:1f85f204c4d54134ae850d401fa435c89cd667d5ce9dc567571776b45941af72", size = 489237, upload-time = "2026-04-08T16:43:09.993Z" }, + { url = "https://files.pythonhosted.org/packages/36/c5/6c2c708e14db3d9caea4b459d8464f58c32047451142fe2cfd90e7458f41/greenlet-3.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7f50c804733b43eded05ae694691c9aa68bca7d0a867d67d4a3f514742a2d53f", size = 1622182, upload-time = "2026-04-08T16:26:24.777Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/4c/50c5fed19378e11a29fabab1f6be39ea95358f4a0a07e115a51ca93385d8/greenlet-3.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2d4f0635dc4aa638cda4b2f5a07ae9a2cff9280327b581a3fcb6f317b4fbc38a", size = 1685050, upload-time = "2026-04-08T15:57:36.453Z" }, + { url = "https://files.pythonhosted.org/packages/db/72/85ae954d734703ab48e622c59d4ce35d77ce840c265814af9c078cacc7aa/greenlet-3.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:1a4a48f24681300c640f143ba7c404270e1ebbbcf34331d7104a4ff40f8ea705", size = 245554, upload-time = "2026-04-08T17:03:50.044Z" }, ] [[package]] @@ -1207,28 +1389,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, ] -[[package]] -name = "h2" -version = "4.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "hpack" }, - { name = "hyperframe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = "sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026, upload-time = "2025-08-23T18:12:19.778Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779, upload-time = "2025-08-23T18:12:17.779Z" }, -] - -[[package]] -name = "hpack" -version = "4.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276, upload-time = "2025-01-22T21:44:58.347Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357, upload-time = "2025-01-22T21:44:56.92Z" }, -] - [[package]] name = "html5lib" version = "1.1" @@ -1300,13 +1460,6 @@ wheels = [ ] [package.optional-dependencies] -brotli = [ - { name = "brotli", marker = "platform_python_implementation == 'CPython'" }, - { name = "brotlicffi", marker = "platform_python_implementation != 'CPython'" }, -] -http2 = [ - { name = "h2" }, -] socks = [ { name = "socksio" }, ] @@ -1332,22 +1485,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794, upload-time = "2021-09-17T21:40:39.897Z" }, ] -[[package]] -name = "hyperframe" -version = "6.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566, upload-time = "2025-01-22T21:41:49.302Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" }, -] - [[package]] name = "idna" -version = "3.11" +version = "3.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/cc/762dfb036166873f0059f3b7de4565e1b5bc3d6f28a414c13da27e442f99/idna-3.13.tar.gz", hash = "sha256:585ea8fe5d69b9181ec1afba340451fba6ba764af97026f92a91d4eef164a242", size = 194210, upload-time = "2026-04-22T16:42:42.314Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, + { url = "https://files.pythonhosted.org/packages/5d/13/ad7d7ca3808a898b4612b6fe93cde56b53f3034dcde235acb1f0e1df24c6/idna-3.13-py3-none-any.whl", hash = "sha256:892ea0cde124a99ce773decba204c5552b69c3c67ffd5f232eb7696135bc8bb3", size = 68629, upload-time = "2026-04-22T16:42:40.909Z" }, ] [[package]] @@ -1382,70 +1526,74 @@ wheels = [ [[package]] name = "jiter" -version = "0.12.0" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/45/9d/e0660989c1370e25848bb4c52d061c71837239738ad937e83edca174c273/jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b", size = 168294, upload-time = "2025-11-09T20:49:23.302Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/c1/0cddc6eb17d4c53a99840953f95dd3accdc5cfc7a337b0e9b26476276be9/jiter-0.14.0.tar.gz", hash = "sha256:e8a39e66dac7153cf3f964a12aad515afa8d74938ec5cc0018adcdae5367c79e", size = 165725, upload-time = "2026-04-10T14:28:42.01Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/92/c9/5b9f7b4983f1b542c64e84165075335e8a236fa9e2ea03a0c79780062be8/jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37", size = 314449, upload-time = "2025-11-09T20:47:22.999Z" }, - { url = "https://files.pythonhosted.org/packages/98/6e/e8efa0e78de00db0aee82c0cf9e8b3f2027efd7f8a71f859d8f4be8e98ef/jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274", size = 319855, upload-time = "2025-11-09T20:47:24.779Z" }, - { url = "https://files.pythonhosted.org/packages/20/26/894cd88e60b5d58af53bec5c6759d1292bd0b37a8b5f60f07abf7a63ae5f/jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3", size = 350171, upload-time = "2025-11-09T20:47:26.469Z" }, - { url = "https://files.pythonhosted.org/packages/f5/27/a7b818b9979ac31b3763d25f3653ec3a954044d5e9f5d87f2f247d679fd1/jiter-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fdd787356c1c13a4f40b43c2156276ef7a71eb487d98472476476d803fb2cf", size = 365590, 
upload-time = "2025-11-09T20:47:27.918Z" }, - { url = "https://files.pythonhosted.org/packages/ba/7e/e46195801a97673a83746170b17984aa8ac4a455746354516d02ca5541b4/jiter-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1eb5db8d9c65b112aacf14fcd0faae9913d07a8afea5ed06ccdd12b724e966a1", size = 479462, upload-time = "2025-11-09T20:47:29.654Z" }, - { url = "https://files.pythonhosted.org/packages/ca/75/f833bfb009ab4bd11b1c9406d333e3b4357709ed0570bb48c7c06d78c7dd/jiter-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c568cc27c473f82480abc15d1301adf333a7ea4f2e813d6a2c7d8b6ba8d0df", size = 378983, upload-time = "2025-11-09T20:47:31.026Z" }, - { url = "https://files.pythonhosted.org/packages/71/b3/7a69d77943cc837d30165643db753471aff5df39692d598da880a6e51c24/jiter-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4321e8a3d868919bcb1abb1db550d41f2b5b326f72df29e53b2df8b006eb9403", size = 361328, upload-time = "2025-11-09T20:47:33.286Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ac/a78f90caf48d65ba70d8c6efc6f23150bc39dc3389d65bbec2a95c7bc628/jiter-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a51bad79f8cc9cac2b4b705039f814049142e0050f30d91695a2d9a6611f126", size = 386740, upload-time = "2025-11-09T20:47:34.703Z" }, - { url = "https://files.pythonhosted.org/packages/39/b6/5d31c2cc8e1b6a6bcf3c5721e4ca0a3633d1ab4754b09bc7084f6c4f5327/jiter-0.12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a67b678f6a5f1dd6c36d642d7db83e456bc8b104788262aaefc11a22339f5a9", size = 520875, upload-time = "2025-11-09T20:47:36.058Z" }, - { url = "https://files.pythonhosted.org/packages/30/b5/4df540fae4e9f68c54b8dab004bd8c943a752f0b00efd6e7d64aa3850339/jiter-0.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efe1a211fe1fd14762adea941e3cfd6c611a136e28da6c39272dbb7a1bbe6a86", size = 511457, upload-time = "2025-11-09T20:47:37.932Z" }, - { url = "https://files.pythonhosted.org/packages/07/65/86b74010e450a1a77b2c1aabb91d4a91dd3cd5afce99f34d75fd1ac64b19/jiter-0.12.0-cp312-cp312-win32.whl", hash = "sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44", size = 204546, upload-time = "2025-11-09T20:47:40.47Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c7/6659f537f9562d963488e3e55573498a442503ced01f7e169e96a6110383/jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb", size = 205196, upload-time = "2025-11-09T20:47:41.794Z" }, - { url = "https://files.pythonhosted.org/packages/21/f4/935304f5169edadfec7f9c01eacbce4c90bb9a82035ac1de1f3bd2d40be6/jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789", size = 186100, upload-time = "2025-11-09T20:47:43.007Z" }, - { url = "https://files.pythonhosted.org/packages/3d/a6/97209693b177716e22576ee1161674d1d58029eb178e01866a0422b69224/jiter-0.12.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6cc49d5130a14b732e0612bc76ae8db3b49898732223ef8b7599aa8d9810683e", size = 313658, upload-time = "2025-11-09T20:47:44.424Z" }, - { url = "https://files.pythonhosted.org/packages/06/4d/125c5c1537c7d8ee73ad3d530a442d6c619714b95027143f1b61c0b4dfe0/jiter-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:37f27a32ce36364d2fa4f7fdc507279db604d27d239ea2e044c8f148410defe1", size = 318605, upload-time = "2025-11-09T20:47:45.973Z" }, - { url = 
"https://files.pythonhosted.org/packages/99/bf/a840b89847885064c41a5f52de6e312e91fa84a520848ee56c97e4fa0205/jiter-0.12.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbc0944aa3d4b4773e348cda635252824a78f4ba44328e042ef1ff3f6080d1cf", size = 349803, upload-time = "2025-11-09T20:47:47.535Z" }, - { url = "https://files.pythonhosted.org/packages/8a/88/e63441c28e0db50e305ae23e19c1d8fae012d78ed55365da392c1f34b09c/jiter-0.12.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da25c62d4ee1ffbacb97fac6dfe4dcd6759ebdc9015991e92a6eae5816287f44", size = 365120, upload-time = "2025-11-09T20:47:49.284Z" }, - { url = "https://files.pythonhosted.org/packages/0a/7c/49b02714af4343970eb8aca63396bc1c82fa01197dbb1e9b0d274b550d4e/jiter-0.12.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:048485c654b838140b007390b8182ba9774621103bd4d77c9c3f6f117474ba45", size = 479918, upload-time = "2025-11-09T20:47:50.807Z" }, - { url = "https://files.pythonhosted.org/packages/69/ba/0a809817fdd5a1db80490b9150645f3aae16afad166960bcd562be194f3b/jiter-0.12.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:635e737fbb7315bef0037c19b88b799143d2d7d3507e61a76751025226b3ac87", size = 379008, upload-time = "2025-11-09T20:47:52.211Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c3/c9fc0232e736c8877d9e6d83d6eeb0ba4e90c6c073835cc2e8f73fdeef51/jiter-0.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e017c417b1ebda911bd13b1e40612704b1f5420e30695112efdbed8a4b389ed", size = 361785, upload-time = "2025-11-09T20:47:53.512Z" }, - { url = "https://files.pythonhosted.org/packages/96/61/61f69b7e442e97ca6cd53086ddc1cf59fb830549bc72c0a293713a60c525/jiter-0.12.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:89b0bfb8b2bf2351fba36bb211ef8bfceba73ef58e7f0c68fb67b5a2795ca2f9", size = 386108, upload-time = "2025-11-09T20:47:54.893Z" }, - { url = "https://files.pythonhosted.org/packages/e9/2e/76bb3332f28550c8f1eba3bf6e5efe211efda0ddbbaf24976bc7078d42a5/jiter-0.12.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:f5aa5427a629a824a543672778c9ce0c5e556550d1569bb6ea28a85015287626", size = 519937, upload-time = "2025-11-09T20:47:56.253Z" }, - { url = "https://files.pythonhosted.org/packages/84/d6/fa96efa87dc8bff2094fb947f51f66368fa56d8d4fc9e77b25d7fbb23375/jiter-0.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed53b3d6acbcb0fd0b90f20c7cb3b24c357fe82a3518934d4edfa8c6898e498c", size = 510853, upload-time = "2025-11-09T20:47:58.32Z" }, - { url = "https://files.pythonhosted.org/packages/8a/28/93f67fdb4d5904a708119a6ab58a8f1ec226ff10a94a282e0215402a8462/jiter-0.12.0-cp313-cp313-win32.whl", hash = "sha256:4747de73d6b8c78f2e253a2787930f4fffc68da7fa319739f57437f95963c4de", size = 204699, upload-time = "2025-11-09T20:47:59.686Z" }, - { url = "https://files.pythonhosted.org/packages/c4/1f/30b0eb087045a0abe2a5c9c0c0c8da110875a1d3be83afd4a9a4e548be3c/jiter-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:e25012eb0c456fcc13354255d0338cd5397cce26c77b2832b3c4e2e255ea5d9a", size = 204258, upload-time = "2025-11-09T20:48:01.01Z" }, - { url = "https://files.pythonhosted.org/packages/2c/f4/2b4daf99b96bce6fc47971890b14b2a36aef88d7beb9f057fafa032c6141/jiter-0.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:c97b92c54fe6110138c872add030a1f99aea2401ddcdaa21edf74705a646dd60", size = 185503, upload-time = "2025-11-09T20:48:02.35Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/ca/67bb15a7061d6fe20b9b2a2fd783e296a1e0f93468252c093481a2f00efa/jiter-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:53839b35a38f56b8be26a7851a48b89bc47e5d88e900929df10ed93b95fea3d6", size = 317965, upload-time = "2025-11-09T20:48:03.783Z" }, - { url = "https://files.pythonhosted.org/packages/18/af/1788031cd22e29c3b14bc6ca80b16a39a0b10e611367ffd480c06a259831/jiter-0.12.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94f669548e55c91ab47fef8bddd9c954dab1938644e715ea49d7e117015110a4", size = 345831, upload-time = "2025-11-09T20:48:05.55Z" }, - { url = "https://files.pythonhosted.org/packages/05/17/710bf8472d1dff0d3caf4ced6031060091c1320f84ee7d5dcbed1f352417/jiter-0.12.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:351d54f2b09a41600ffea43d081522d792e81dcfb915f6d2d242744c1cc48beb", size = 361272, upload-time = "2025-11-09T20:48:06.951Z" }, - { url = "https://files.pythonhosted.org/packages/fb/f1/1dcc4618b59761fef92d10bcbb0b038b5160be653b003651566a185f1a5c/jiter-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2a5e90604620f94bf62264e7c2c038704d38217b7465b863896c6d7c902b06c7", size = 204604, upload-time = "2025-11-09T20:48:08.328Z" }, - { url = "https://files.pythonhosted.org/packages/d9/32/63cb1d9f1c5c6632a783c0052cde9ef7ba82688f7065e2f0d5f10a7e3edb/jiter-0.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:88ef757017e78d2860f96250f9393b7b577b06a956ad102c29c8237554380db3", size = 185628, upload-time = "2025-11-09T20:48:09.572Z" }, - { url = "https://files.pythonhosted.org/packages/a8/99/45c9f0dbe4a1416b2b9a8a6d1236459540f43d7fb8883cff769a8db0612d/jiter-0.12.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:c46d927acd09c67a9fb1416df45c5a04c27e83aae969267e98fba35b74e99525", size = 312478, upload-time = "2025-11-09T20:48:10.898Z" }, - { url = "https://files.pythonhosted.org/packages/4c/a7/54ae75613ba9e0f55fcb0bc5d1f807823b5167cc944e9333ff322e9f07dd/jiter-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:774ff60b27a84a85b27b88cd5583899c59940bcc126caca97eb2a9df6aa00c49", size = 318706, upload-time = "2025-11-09T20:48:12.266Z" }, - { url = "https://files.pythonhosted.org/packages/59/31/2aa241ad2c10774baf6c37f8b8e1f39c07db358f1329f4eb40eba179c2a2/jiter-0.12.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5433fab222fb072237df3f637d01b81f040a07dcac1cb4a5c75c7aa9ed0bef1", size = 351894, upload-time = "2025-11-09T20:48:13.673Z" }, - { url = "https://files.pythonhosted.org/packages/54/4f/0f2759522719133a9042781b18cc94e335b6d290f5e2d3e6899d6af933e3/jiter-0.12.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f8c593c6e71c07866ec6bfb790e202a833eeec885022296aff6b9e0b92d6a70e", size = 365714, upload-time = "2025-11-09T20:48:15.083Z" }, - { url = "https://files.pythonhosted.org/packages/dc/6f/806b895f476582c62a2f52c453151edd8a0fde5411b0497baaa41018e878/jiter-0.12.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:90d32894d4c6877a87ae00c6b915b609406819dce8bc0d4e962e4de2784e567e", size = 478989, upload-time = "2025-11-09T20:48:16.706Z" }, - { url = "https://files.pythonhosted.org/packages/86/6c/012d894dc6e1033acd8db2b8346add33e413ec1c7c002598915278a37f79/jiter-0.12.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:798e46eed9eb10c3adbbacbd3bdb5ecd4cf7064e453d00dbef08802dae6937ff", size = 378615, upload-time = "2025-11-09T20:48:18.614Z" }, - { url = 
"https://files.pythonhosted.org/packages/87/30/d718d599f6700163e28e2c71c0bbaf6dace692e7df2592fd793ac9276717/jiter-0.12.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3f1368f0a6719ea80013a4eb90ba72e75d7ea67cfc7846db2ca504f3df0169a", size = 364745, upload-time = "2025-11-09T20:48:20.117Z" }, - { url = "https://files.pythonhosted.org/packages/8f/85/315b45ce4b6ddc7d7fceca24068543b02bdc8782942f4ee49d652e2cc89f/jiter-0.12.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65f04a9d0b4406f7e51279710b27484af411896246200e461d80d3ba0caa901a", size = 386502, upload-time = "2025-11-09T20:48:21.543Z" }, - { url = "https://files.pythonhosted.org/packages/74/0b/ce0434fb40c5b24b368fe81b17074d2840748b4952256bab451b72290a49/jiter-0.12.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:fd990541982a24281d12b67a335e44f117e4c6cbad3c3b75c7dea68bf4ce3a67", size = 519845, upload-time = "2025-11-09T20:48:22.964Z" }, - { url = "https://files.pythonhosted.org/packages/e8/a3/7a7a4488ba052767846b9c916d208b3ed114e3eb670ee984e4c565b9cf0d/jiter-0.12.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:b111b0e9152fa7df870ecaebb0bd30240d9f7fff1f2003bcb4ed0f519941820b", size = 510701, upload-time = "2025-11-09T20:48:24.483Z" }, - { url = "https://files.pythonhosted.org/packages/c3/16/052ffbf9d0467b70af24e30f91e0579e13ded0c17bb4a8eb2aed3cb60131/jiter-0.12.0-cp314-cp314-win32.whl", hash = "sha256:a78befb9cc0a45b5a5a0d537b06f8544c2ebb60d19d02c41ff15da28a9e22d42", size = 205029, upload-time = "2025-11-09T20:48:25.749Z" }, - { url = "https://files.pythonhosted.org/packages/e4/18/3cf1f3f0ccc789f76b9a754bdb7a6977e5d1d671ee97a9e14f7eb728d80e/jiter-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:e1fe01c082f6aafbe5c8faf0ff074f38dfb911d53f07ec333ca03f8f6226debf", size = 204960, upload-time = "2025-11-09T20:48:27.415Z" }, - { url = "https://files.pythonhosted.org/packages/02/68/736821e52ecfdeeb0f024b8ab01b5a229f6b9293bbdb444c27efade50b0f/jiter-0.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:d72f3b5a432a4c546ea4bedc84cce0c3404874f1d1676260b9c7f048a9855451", size = 185529, upload-time = "2025-11-09T20:48:29.125Z" }, - { url = "https://files.pythonhosted.org/packages/30/61/12ed8ee7a643cce29ac97c2281f9ce3956eb76b037e88d290f4ed0d41480/jiter-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e6ded41aeba3603f9728ed2b6196e4df875348ab97b28fc8afff115ed42ba7a7", size = 318974, upload-time = "2025-11-09T20:48:30.87Z" }, - { url = "https://files.pythonhosted.org/packages/2d/c6/f3041ede6d0ed5e0e79ff0de4c8f14f401bbf196f2ef3971cdbe5fd08d1d/jiter-0.12.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a947920902420a6ada6ad51892082521978e9dd44a802663b001436e4b771684", size = 345932, upload-time = "2025-11-09T20:48:32.658Z" }, - { url = "https://files.pythonhosted.org/packages/d5/5d/4d94835889edd01ad0e2dbfc05f7bdfaed46292e7b504a6ac7839aa00edb/jiter-0.12.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:add5e227e0554d3a52cf390a7635edaffdf4f8fce4fdbcef3cc2055bb396a30c", size = 367243, upload-time = "2025-11-09T20:48:34.093Z" }, - { url = "https://files.pythonhosted.org/packages/fd/76/0051b0ac2816253a99d27baf3dda198663aff882fa6ea7deeb94046da24e/jiter-0.12.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f9b1cda8fcb736250d7e8711d4580ebf004a46771432be0ae4796944b5dfa5d", size = 479315, upload-time = "2025-11-09T20:48:35.507Z" }, - { url = 
"https://files.pythonhosted.org/packages/70/ae/83f793acd68e5cb24e483f44f482a1a15601848b9b6f199dacb970098f77/jiter-0.12.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:deeb12a2223fe0135c7ff1356a143d57f95bbf1f4a66584f1fc74df21d86b993", size = 380714, upload-time = "2025-11-09T20:48:40.014Z" }, - { url = "https://files.pythonhosted.org/packages/b1/5e/4808a88338ad2c228b1126b93fcd8ba145e919e886fe910d578230dabe3b/jiter-0.12.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c596cc0f4cb574877550ce4ecd51f8037469146addd676d7c1a30ebe6391923f", size = 365168, upload-time = "2025-11-09T20:48:41.462Z" }, - { url = "https://files.pythonhosted.org/packages/0c/d4/04619a9e8095b42aef436b5aeb4c0282b4ff1b27d1db1508df9f5dc82750/jiter-0.12.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ab4c823b216a4aeab3fdbf579c5843165756bd9ad87cc6b1c65919c4715f783", size = 387893, upload-time = "2025-11-09T20:48:42.921Z" }, - { url = "https://files.pythonhosted.org/packages/17/ea/d3c7e62e4546fdc39197fa4a4315a563a89b95b6d54c0d25373842a59cbe/jiter-0.12.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e427eee51149edf962203ff8db75a7514ab89be5cb623fb9cea1f20b54f1107b", size = 520828, upload-time = "2025-11-09T20:48:44.278Z" }, - { url = "https://files.pythonhosted.org/packages/cc/0b/c6d3562a03fd767e31cb119d9041ea7958c3c80cb3d753eafb19b3b18349/jiter-0.12.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:edb868841f84c111255ba5e80339d386d937ec1fdce419518ce1bd9370fac5b6", size = 511009, upload-time = "2025-11-09T20:48:45.726Z" }, - { url = "https://files.pythonhosted.org/packages/aa/51/2cb4468b3448a8385ebcd15059d325c9ce67df4e2758d133ab9442b19834/jiter-0.12.0-cp314-cp314t-win32.whl", hash = "sha256:8bbcfe2791dfdb7c5e48baf646d37a6a3dcb5a97a032017741dea9f817dca183", size = 205110, upload-time = "2025-11-09T20:48:47.033Z" }, - { url = "https://files.pythonhosted.org/packages/b2/c5/ae5ec83dec9c2d1af805fd5fe8f74ebded9c8670c5210ec7820ce0dbeb1e/jiter-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2fa940963bf02e1d8226027ef461e36af472dea85d36054ff835aeed944dd873", size = 205223, upload-time = "2025-11-09T20:48:49.076Z" }, - { url = "https://files.pythonhosted.org/packages/97/9a/3c5391907277f0e55195550cf3fa8e293ae9ee0c00fb402fec1e38c0c82f/jiter-0.12.0-cp314-cp314t-win_arm64.whl", hash = "sha256:506c9708dd29b27288f9f8f1140c3cb0e3d8ddb045956d7757b1fa0e0f39a473", size = 185564, upload-time = "2025-11-09T20:48:50.376Z" }, - { url = "https://files.pythonhosted.org/packages/cb/f5/12efb8ada5f5c9edc1d4555fe383c1fb2eac05ac5859258a72d61981d999/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:e8547883d7b96ef2e5fe22b88f8a4c8725a56e7f4abafff20fd5272d634c7ecb", size = 309974, upload-time = "2025-11-09T20:49:17.187Z" }, - { url = "https://files.pythonhosted.org/packages/85/15/d6eb3b770f6a0d332675141ab3962fd4a7c270ede3515d9f3583e1d28276/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:89163163c0934854a668ed783a2546a0617f71706a2551a4a0666d91ab365d6b", size = 304233, upload-time = "2025-11-09T20:49:18.734Z" }, - { url = "https://files.pythonhosted.org/packages/8c/3e/e7e06743294eea2cf02ced6aa0ff2ad237367394e37a0e2b4a1108c67a36/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d96b264ab7d34bbb2312dedc47ce07cd53f06835eacbc16dde3761f47c3a9e7f", size = 338537, upload-time = "2025-11-09T20:49:20.317Z" }, - { url = 
"https://files.pythonhosted.org/packages/2f/9c/6753e6522b8d0ef07d3a3d239426669e984fb0eba15a315cdbc1253904e4/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24e864cb30ab82311c6425655b0cdab0a98c5d973b065c66a3f020740c2324c", size = 346110, upload-time = "2025-11-09T20:49:21.817Z" }, + { url = "https://files.pythonhosted.org/packages/5a/68/7390a418f10897da93b158f2d5a8bd0bcd73a0f9ec3bb36917085bb759ef/jiter-0.14.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb2ce3a7bc331256dfb14cefc34832366bb28a9aca81deaf43bbf2a5659e607", size = 316295, upload-time = "2026-04-10T14:26:24.887Z" }, + { url = "https://files.pythonhosted.org/packages/60/a0/5854ac00ff63551c52c6c89534ec6aba4b93474e7924d64e860b1c94165b/jiter-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5252a7ca23785cef5d02d4ece6077a1b556a410c591b379f82091c3001e14844", size = 315898, upload-time = "2026-04-10T14:26:26.601Z" }, + { url = "https://files.pythonhosted.org/packages/41/a1/4f44832650a16b18e8391f1bf1d6ca4909bc738351826bcc198bba4357f4/jiter-0.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c409578cbd77c338975670ada777add4efd53379667edf0aceea730cabede6fb", size = 343730, upload-time = "2026-04-10T14:26:28.326Z" }, + { url = "https://files.pythonhosted.org/packages/48/64/a329e9d469f86307203594b1707e11ae51c3348d03bfd514a5f997870012/jiter-0.14.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7ede4331a1899d604463369c730dbb961ffdc5312bc7f16c41c2896415b1304a", size = 370102, upload-time = "2026-04-10T14:26:30.089Z" }, + { url = "https://files.pythonhosted.org/packages/94/c1/5e3dfc59635aa4d4c7bd20a820ac1d09b8ed851568356802cf1c08edb3cf/jiter-0.14.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92cd8b6025981a041f5310430310b55b25ca593972c16407af8837d3d7d2ca01", size = 461335, upload-time = "2026-04-10T14:26:31.911Z" }, + { url = "https://files.pythonhosted.org/packages/e3/1b/dd157009dbc058f7b00108f545ccb72a2d56461395c4fc7b9cfdccb00af4/jiter-0.14.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:351bf6eda4e3a7ceb876377840c702e9a3e4ecc4624dbfb2d6463c67ae52637d", size = 378536, upload-time = "2026-04-10T14:26:33.595Z" }, + { url = "https://files.pythonhosted.org/packages/91/78/256013667b7c10b8834f8e6e54cd3e562d4c6e34227a1596addccc05e38c/jiter-0.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1dcfbeb93d9ecd9ca128bbf8910120367777973fa193fb9a39c31237d8df165", size = 353859, upload-time = "2026-04-10T14:26:35.098Z" }, + { url = "https://files.pythonhosted.org/packages/de/d9/137d65ade9093a409fe80955ce60b12bb753722c986467aeda47faf450ad/jiter-0.14.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:ae039aaef8de3f8157ecc1fdd4d85043ac4f57538c245a0afaecb8321ec951c3", size = 357626, upload-time = "2026-04-10T14:26:36.685Z" }, + { url = "https://files.pythonhosted.org/packages/2e/48/76750835b87029342727c1a268bea8878ab988caf81ee4e7b880900eeb5a/jiter-0.14.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7d9d51eb96c82a9652933bd769fe6de66877d6eb2b2440e281f2938c51b5643e", size = 393172, upload-time = "2026-04-10T14:26:38.097Z" }, + { url = "https://files.pythonhosted.org/packages/a6/60/456c4e81d5c8045279aefe60e9e483be08793828800a4e64add8fdde7f2a/jiter-0.14.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d824ca4148b705970bf4e120924a212fdfca9859a73e42bd7889a63a4ea6bb98", size = 520300, upload-time = 
"2026-04-10T14:26:39.532Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9f/2020e0984c235f678dced38fe4eec3058cf528e6af36ebf969b410305941/jiter-0.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff3a6465b3a0f54b1a430f45c3c0ba7d61ceb45cbc3e33f9e1a7f638d690baf3", size = 553059, upload-time = "2026-04-10T14:26:40.991Z" }, + { url = "https://files.pythonhosted.org/packages/ef/32/e2d298e1a22a4bbe6062136d1c7192db7dba003a6975e51d9a9eecabc4c2/jiter-0.14.0-cp312-cp312-win32.whl", hash = "sha256:5dec7c0a3e98d2a3f8a2e67382d0d7c3ac60c69103a4b271da889b4e8bb1e129", size = 206030, upload-time = "2026-04-10T14:26:42.517Z" }, + { url = "https://files.pythonhosted.org/packages/36/ac/96369141b3d8a4a8e4590e983085efe1c436f35c0cda940dd76d942e3e40/jiter-0.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:fc7e37b4b8bc7e80a63ad6cfa5fc11fab27dbfea4cc4ae644b1ab3f273dc348f", size = 201603, upload-time = "2026-04-10T14:26:44.328Z" }, + { url = "https://files.pythonhosted.org/packages/01/c3/75d847f264647017d7e3052bbcc8b1e24b95fa139c320c5f5066fa7a0bdd/jiter-0.14.0-cp312-cp312-win_arm64.whl", hash = "sha256:ee4a72f12847ef29b072aee9ad5474041ab2924106bdca9fcf5d7d965853e057", size = 191525, upload-time = "2026-04-10T14:26:46Z" }, + { url = "https://files.pythonhosted.org/packages/97/2a/09f70020898507a89279659a1afe3364d57fc1b2c89949081975d135f6f5/jiter-0.14.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:af72f204cf4d44258e5b4c1745130ac45ddab0e71a06333b01de660ab4187a94", size = 315502, upload-time = "2026-04-10T14:26:47.697Z" }, + { url = "https://files.pythonhosted.org/packages/d6/be/080c96a45cd74f9fce5db4fd68510b88087fb37ffe2541ff73c12db92535/jiter-0.14.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4b77da71f6e819be5fbcec11a453fde5b1d0267ef6ed487e2a392fd8e14e4e3a", size = 314870, upload-time = "2026-04-10T14:26:49.149Z" }, + { url = "https://files.pythonhosted.org/packages/7d/5e/2d0fee155826a968a832cc32438de5e2a193292c8721ca70d0b53e58245b/jiter-0.14.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f4ea612fe8b84b8b04e51d0e78029ecf3466348e25973f953de6e6a59aa4c1", size = 343406, upload-time = "2026-04-10T14:26:50.762Z" }, + { url = "https://files.pythonhosted.org/packages/70/af/bf9ee0d3a4f8dc0d679fc1337f874fe60cdbf841ebbb304b374e1c9aaceb/jiter-0.14.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62fe2451f8fcc0240261e6a4df18ecbcd58327857e61e625b2393ea3b468aac9", size = 369415, upload-time = "2026-04-10T14:26:52.188Z" }, + { url = "https://files.pythonhosted.org/packages/0f/83/8e8561eadba31f4d3948a5b712fb0447ec71c3560b57a855449e7b8ddc98/jiter-0.14.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6112f26f5afc75bcb475787d29da3aa92f9d09c7858f632f4be6ffe607be82e9", size = 461456, upload-time = "2026-04-10T14:26:53.611Z" }, + { url = "https://files.pythonhosted.org/packages/f6/c9/c5299e826a5fe6108d172b344033f61c69b1bb979dd8d9ddd4278a160971/jiter-0.14.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:215a6cb8fb7dc702aa35d475cc00ddc7f970e5c0b1417fb4b4ac5d82fa2a29db", size = 378488, upload-time = "2026-04-10T14:26:55.211Z" }, + { url = "https://files.pythonhosted.org/packages/5d/37/c16d9d15c0a471b8644b1abe3c82668092a707d9bedcf076f24ff2e380cd/jiter-0.14.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ab96a30fb3cb2c7e0cd33f7616c8860da5f5674438988a54ac717caccdbaa", size = 353242, upload-time = "2026-04-10T14:26:56.705Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/ea/8050cb0dc654e728e1bfacbc0c640772f2181af5dedd13ae70145743a439/jiter-0.14.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:3a99c1387b1f2928f799a9de899193484d66206a50e98233b6b088a7f0c1edb2", size = 356823, upload-time = "2026-04-10T14:26:58.281Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/cf71506d270e5f84d97326bf220e47aed9b95e9a4a060758fb07772170ab/jiter-0.14.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ab18d11074485438695f8d34a1b6da61db9754248f96d51341956607a8f39985", size = 392564, upload-time = "2026-04-10T14:27:00.018Z" }, + { url = "https://files.pythonhosted.org/packages/b0/cc/8c6c74a3efb5bd671bfd14f51e8a73375464ca914b1551bc3b40e26ac2c9/jiter-0.14.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:801028dcfc26ac0895e4964cbc0fd62c73be9fd4a7d7b1aaf6e5790033a719b7", size = 520322, upload-time = "2026-04-10T14:27:01.664Z" }, + { url = "https://files.pythonhosted.org/packages/41/24/68d7b883ec959884ddf00d019b2e0e82ba81b167e1253684fa90519ce33c/jiter-0.14.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ad425b087aafb4a1c7e1e98a279200743b9aaf30c3e0ba723aec93f061bd9bc8", size = 552619, upload-time = "2026-04-10T14:27:03.316Z" }, + { url = "https://files.pythonhosted.org/packages/b6/89/b1a0985223bbf3150ff9e8f46f98fc9360c1de94f48abe271bbe1b465682/jiter-0.14.0-cp313-cp313-win32.whl", hash = "sha256:882bcb9b334318e233950b8be366fe5f92c86b66a7e449e76975dfd6d776a01f", size = 205699, upload-time = "2026-04-10T14:27:04.662Z" }, + { url = "https://files.pythonhosted.org/packages/4c/19/3f339a5a7f14a11730e67f6be34f9d5105751d547b615ef593fa122a5ded/jiter-0.14.0-cp313-cp313-win_amd64.whl", hash = "sha256:9b8c571a5dba09b98bd3462b5a53f27209a5cbbe85670391692ede71974e979f", size = 201323, upload-time = "2026-04-10T14:27:06.139Z" }, + { url = "https://files.pythonhosted.org/packages/50/56/752dd89c84be0e022a8ea3720bcfa0a8431db79a962578544812ce061739/jiter-0.14.0-cp313-cp313-win_arm64.whl", hash = "sha256:34f19dcc35cb1abe7c369b3756babf8c7f04595c0807a848df8f26ef8298ef92", size = 191099, upload-time = "2026-04-10T14:27:07.564Z" }, + { url = "https://files.pythonhosted.org/packages/91/28/292916f354f25a1fe8cf2c918d1415c699a4a659ae00be0430e1c5d9ffea/jiter-0.14.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e89bcd7d426a75bb4952c696b267075790d854a07aad4c9894551a82c5b574ab", size = 320880, upload-time = "2026-04-10T14:27:09.326Z" }, + { url = "https://files.pythonhosted.org/packages/ad/c7/b002a7d8b8957ac3d469bd59c18ef4b1595a5216ae0de639a287b9816023/jiter-0.14.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b25beaa0d4447ea8c7ae0c18c688905d34840d7d0b937f2f7bdd52162c98a40", size = 346563, upload-time = "2026-04-10T14:27:11.287Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3b/f8d07580d8706021d255a6356b8fab13ee4c869412995550ce6ed4ddf97d/jiter-0.14.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:651a8758dd413c51e3b7f6557cdc6921faf70b14106f45f969f091f5cda990ea", size = 357928, upload-time = "2026-04-10T14:27:12.729Z" }, + { url = "https://files.pythonhosted.org/packages/47/5b/ac1a974da29e35507230383110ffec59998b290a8732585d04e19a9eb5ba/jiter-0.14.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e1a7eead856a5038a8d291f1447176ab0b525c77a279a058121b5fccee257f6f", size = 203519, upload-time = "2026-04-10T14:27:14.125Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/6d/9fc8433d667d2454271378a79747d8c76c10b51b482b454e6190e511f244/jiter-0.14.0-cp313-cp313t-win_arm64.whl", hash = "sha256:2e692633a12cda97e352fdcd1c4acc971b1c28707e1e33aeef782b0cbf051975", size = 190113, upload-time = "2026-04-10T14:27:16.638Z" }, + { url = "https://files.pythonhosted.org/packages/4f/1e/354ed92461b165bd581f9ef5150971a572c873ec3b68a916d5aa91da3cc2/jiter-0.14.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:6f396837fc7577871ca8c12edaf239ed9ccef3bbe39904ae9b8b63ce0a48b140", size = 315277, upload-time = "2026-04-10T14:27:18.109Z" }, + { url = "https://files.pythonhosted.org/packages/a6/95/8c7c7028aa8636ac21b7a55faef3e34215e6ed0cbf5ae58258427f621aa3/jiter-0.14.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a4d50ea3d8ba4176f79754333bd35f1bbcd28e91adc13eb9b7ca91bc52a6cef9", size = 315923, upload-time = "2026-04-10T14:27:19.603Z" }, + { url = "https://files.pythonhosted.org/packages/47/40/e2a852a44c4a089f2681a16611b7ce113224a80fd8504c46d78491b47220/jiter-0.14.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce17f8a050447d1b4153bda4fb7d26e6a9e74eb4f4a41913f30934c5075bf615", size = 344943, upload-time = "2026-04-10T14:27:21.262Z" }, + { url = "https://files.pythonhosted.org/packages/fc/1f/670f92adee1e9895eac41e8a4d623b6da68c4d46249d8b556b60b63f949e/jiter-0.14.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4f1c4b125e1652aefbc2e2c1617b60a160ab789d180e3d423c41439e5f32850", size = 369725, upload-time = "2026-04-10T14:27:22.766Z" }, + { url = "https://files.pythonhosted.org/packages/01/2f/541c9ba567d05de1c4874a0f8f8c5e3fd78e2b874266623da9a775cf46e0/jiter-0.14.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be808176a6a3a14321d18c603f2d40741858a7c4fc982f83232842689fe86dd9", size = 461210, upload-time = "2026-04-10T14:27:24.315Z" }, + { url = "https://files.pythonhosted.org/packages/ce/a9/c31cbec09627e0d5de7aeaec7690dba03e090caa808fefd8133137cf45bc/jiter-0.14.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26679d58ba816f88c3849306dd58cb863a90a1cf352cdd4ef67e30ccf8a77994", size = 380002, upload-time = "2026-04-10T14:27:26.155Z" }, + { url = "https://files.pythonhosted.org/packages/50/02/3c05c1666c41904a2f607475a73e7a4763d1cbde2d18229c4f85b22dc253/jiter-0.14.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80381f5a19af8fa9aef743f080e34f6b25ebd89656475f8cf0470ec6157052aa", size = 354678, upload-time = "2026-04-10T14:27:27.701Z" }, + { url = "https://files.pythonhosted.org/packages/7d/97/e15b33545c2b13518f560d695f974b9891b311641bdcf178d63177e8801e/jiter-0.14.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:004df5fdb8ecbd6d99f3227df18ba1a259254c4359736a2e6f036c944e02d7c5", size = 358920, upload-time = "2026-04-10T14:27:29.256Z" }, + { url = "https://files.pythonhosted.org/packages/ad/d2/8b1461def6b96ba44530df20d07ef7a1c7da22f3f9bf1727e2d611077bf1/jiter-0.14.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cff5708f7ed0fa098f2b53446c6fa74c48469118e5cd7497b4f1cd569ab06928", size = 394512, upload-time = "2026-04-10T14:27:31.344Z" }, + { url = "https://files.pythonhosted.org/packages/e3/88/837566dd6ed6e452e8d3205355afd484ce44b2533edfa4ed73a298ea893e/jiter-0.14.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:2492e5f06c36a976d25c7cc347a60e26d5470178d44cde1b9b75e60b4e519f28", size = 521120, upload-time = "2026-04-10T14:27:33.299Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/6b/b00b45c4d1b4c031777fe161d620b755b5b02cdade1e316dcb46e4471d63/jiter-0.14.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:7609cfbe3a03d37bfdbf5052012d5a879e72b83168a363deae7b3a26564d57de", size = 553668, upload-time = "2026-04-10T14:27:34.868Z" }, + { url = "https://files.pythonhosted.org/packages/ad/d8/6fe5b42011d19397433d345716eac16728ac241862a2aac9c91923c7509a/jiter-0.14.0-cp314-cp314-win32.whl", hash = "sha256:7282342d32e357543565286b6450378c3cd402eea333fc1ebe146f1fabb306fc", size = 207001, upload-time = "2026-04-10T14:27:36.455Z" }, + { url = "https://files.pythonhosted.org/packages/e5/43/5c2e08da1efad5e410f0eaaabeadd954812612c33fbbd8fd5328b489139d/jiter-0.14.0-cp314-cp314-win_amd64.whl", hash = "sha256:bd77945f38866a448e73b0b7637366afa814d4617790ecd88a18ca74377e6c02", size = 202187, upload-time = "2026-04-10T14:27:38Z" }, + { url = "https://files.pythonhosted.org/packages/aa/1f/6e39ac0b4cdfa23e606af5b245df5f9adaa76f35e0c5096790da430ca506/jiter-0.14.0-cp314-cp314-win_arm64.whl", hash = "sha256:f2d4c61da0821ee42e0cdf5489da60a6d074306313a377c2b35af464955a3611", size = 192257, upload-time = "2026-04-10T14:27:39.504Z" }, + { url = "https://files.pythonhosted.org/packages/05/57/7dbc0ffbbb5176a27e3518716608aa464aee2e2887dc938f0b900a120449/jiter-0.14.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1bf7ff85517dd2f20a5750081d2b75083c1b269cf75afc7511bdf1f9548beb3b", size = 323441, upload-time = "2026-04-10T14:27:41.039Z" }, + { url = "https://files.pythonhosted.org/packages/83/6e/7b3314398d8983f06b557aa21b670511ec72d3b79a68ee5e4d9bff972286/jiter-0.14.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8ef8791c3e78d6c6b157c6d360fbb5c715bebb8113bc6a9303c5caff012754a", size = 348109, upload-time = "2026-04-10T14:27:42.552Z" }, + { url = "https://files.pythonhosted.org/packages/ae/4f/8dc674bcd7db6dba566de73c08c763c337058baff1dbeb34567045b27cdc/jiter-0.14.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e74663b8b10da1fe0f4e4703fd7980d24ad17174b6bb35d8498d6e3ebce2ae6a", size = 368328, upload-time = "2026-04-10T14:27:44.574Z" }, + { url = "https://files.pythonhosted.org/packages/3b/5f/188e09a1f20906f98bbdec44ed820e19f4e8eb8aff88b9d1a5a497587ff3/jiter-0.14.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1aca29ba52913f78362ec9c2da62f22cdc4c3083313403f90c15460979b84d9b", size = 463301, upload-time = "2026-04-10T14:27:46.717Z" }, + { url = "https://files.pythonhosted.org/packages/ac/f0/19046ef965ed8f349e8554775bb12ff4352f443fbe12b95d31f575891256/jiter-0.14.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b39b7d87a952b79949af5fef44d2544e58c21a28da7f1bae3ef166455c61746", size = 378891, upload-time = "2026-04-10T14:27:48.32Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c3/da43bd8431ee175695777ee78cf0e93eacbb47393ff493f18c45231b427d/jiter-0.14.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d918a68b26e9fab068c2b5453577ef04943ab2807b9a6275df2a812599a310", size = 360749, upload-time = "2026-04-10T14:27:49.88Z" }, + { url = "https://files.pythonhosted.org/packages/72/26/e054771be889707c6161dbdec9c23d33a9ec70945395d70f07cfea1e9a6f/jiter-0.14.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:b08997c35aee1201c1a5361466a8fb9162d03ae7bf6568df70b6c859f1e654a4", size = 358526, upload-time = "2026-04-10T14:27:51.504Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/0f/7bea65ea2a6d91f2bf989ff11a18136644392bf2b0497a1fa50934c30a9c/jiter-0.14.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:260bf7ca20704d58d41f669e5e9fe7fe2fa72901a6b324e79056f5d52e9c9be2", size = 393926, upload-time = "2026-04-10T14:27:53.368Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/b1ff7d70deef61ac0b7c6c2f12d2ace950cdeecb4fdc94500a0926802857/jiter-0.14.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:37826e3df29e60f30a382f9294348d0238ef127f4b5d7f5f8da78b5b9e050560", size = 521052, upload-time = "2026-04-10T14:27:55.058Z" }, + { url = "https://files.pythonhosted.org/packages/0b/7b/3b0649983cbaf15eda26a414b5b1982e910c67bd6f7b1b490f3cfc76896a/jiter-0.14.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:645be49c46f2900937ba0eaf871ad5183c96858c0af74b6becc7f4e367e36e06", size = 553716, upload-time = "2026-04-10T14:27:57.269Z" }, + { url = "https://files.pythonhosted.org/packages/97/f8/33d78c83bd93ae0c0af05293a6660f88a1977caef39a6d72a84afab94ce0/jiter-0.14.0-cp314-cp314t-win32.whl", hash = "sha256:2f7877ed45118de283786178eceaf877110abacd04fde31efff3940ae9672674", size = 207957, upload-time = "2026-04-10T14:27:59.285Z" }, + { url = "https://files.pythonhosted.org/packages/d6/ac/2b760516c03e2227826d1f7025d89bf6bf6357a28fe75c2a2800873c50bf/jiter-0.14.0-cp314-cp314t-win_amd64.whl", hash = "sha256:14c0cb10337c49f5eafe8e7364daca5e29a020ea03580b8f8e6c597fed4e1588", size = 204690, upload-time = "2026-04-10T14:28:00.962Z" }, + { url = "https://files.pythonhosted.org/packages/dc/2e/a44c20c58aeed0355f2d326969a181696aeb551a25195f47563908a815be/jiter-0.14.0-cp314-cp314t-win_arm64.whl", hash = "sha256:5419d4aa2024961da9fe12a9cfe7484996735dca99e8e090b5c88595ef1951ff", size = 191338, upload-time = "2026-04-10T14:28:02.853Z" }, + { url = "https://files.pythonhosted.org/packages/21/42/9042c3f3019de4adcb8c16591c325ec7255beea9fcd33a42a43f3b0b1000/jiter-0.14.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:fbd9e482663ca9d005d051330e4d2d8150bb208a209409c10f7e7dfdf7c49da9", size = 308810, upload-time = "2026-04-10T14:28:34.673Z" }, + { url = "https://files.pythonhosted.org/packages/60/cf/a7e19b308bd86bb04776803b1f01a5f9a287a4c55205f4708827ee487fbf/jiter-0.14.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:33a20d838b91ef376b3a56896d5b04e725c7df5bc4864cc6569cf046a8d73b6d", size = 308443, upload-time = "2026-04-10T14:28:36.658Z" }, + { url = "https://files.pythonhosted.org/packages/ca/44/e26ede3f0caeff93f222559cb0cc4ca68579f07d009d7b6010c5b586f9b1/jiter-0.14.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:432c4db5255d86a259efde91e55cb4c8d18c0521d844c9e2e7efcce3899fb016", size = 343039, upload-time = "2026-04-10T14:28:38.356Z" }, + { url = "https://files.pythonhosted.org/packages/da/e9/1f9ada30cef7b05e74bb06f52127e7a724976c225f46adb65c37b1dadfb6/jiter-0.14.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f00d94b281174144d6532a04b66a12cb866cbdc47c3af3bfe2973677f9861a", size = 349613, upload-time = "2026-04-10T14:28:40.066Z" }, ] [[package]] @@ -1462,11 +1610,11 @@ wheels = [ [[package]] name = "jsonpointer" -version = "3.0.0" +version = "3.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = 
"sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114, upload-time = "2024-06-10T19:24:42.462Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/c7/af399a2e7a67fd18d63c40c5e62d3af4e67b836a2107468b6a5ea24c4304/jsonpointer-3.1.1.tar.gz", hash = "sha256:0b801c7db33a904024f6004d526dcc53bbb8a4a0f4e32bfd10beadf60adf1900", size = 9068, upload-time = "2026-03-23T22:32:32.458Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6a/a83720e953b1682d2d109d3c2dbb0bc9bf28cc1cbc205be4ef4be5da709d/jsonpointer-3.1.1-py3-none-any.whl", hash = "sha256:8ff8b95779d071ba472cf5bc913028df06031797532f08a7d5b602d8b2a488ca", size = 7659, upload-time = "2026-03-23T22:32:31.568Z" }, ] [[package]] @@ -1486,24 +1634,33 @@ wheels = [ [[package]] name = "jsonschema-rs" -version = "0.29.1" +version = "0.44.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/b4/33a9b25cad41d1e533c1ab7ff30eaec50628dd1bcb92171b99a2e944d61f/jsonschema_rs-0.29.1.tar.gz", hash = "sha256:a9f896a9e4517630374f175364705836c22f09d5bd5bbb06ec0611332b6702fd", size = 1406679, upload-time = "2025-02-08T21:25:12.639Z" } +sdist = { url = "https://files.pythonhosted.org/packages/68/88/f0cc7013ad6a3d0b86275a6d0a3112eaa705545c89134ab2a057865c054c/jsonschema_rs-0.44.1.tar.gz", hash = "sha256:49ca909cc3017990a732145b9a7c2f1a0727b2f95dba4190c05a514575b5f4bf", size = 1975289, upload-time = "2026-03-03T19:08:21.892Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/4a/67ea15558ab85e67d1438b2e5da63b8e89b273c457106cbc87f8f4959a3d/jsonschema_rs-0.29.1-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:9fe7529faa6a84d23e31b1f45853631e4d4d991c85f3d50e6d1df857bb52b72d", size = 3825206, upload-time = "2025-02-08T21:24:19.985Z" }, - { url = "https://files.pythonhosted.org/packages/b9/2e/bc75ed65d11ba47200ade9795ebd88eb2e64c2852a36d9be640172563430/jsonschema_rs-0.29.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5d7e385298f250ed5ce4928fd59fabf2b238f8167f2c73b9414af8143dfd12e", size = 1966302, upload-time = "2025-02-08T21:24:21.673Z" }, - { url = "https://files.pythonhosted.org/packages/95/dd/4a90e96811f897de066c69d95bc0983138056b19cb169f2a99c736e21933/jsonschema_rs-0.29.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64a29be0504731a2e3164f66f609b9999aa66a2df3179ecbfc8ead88e0524388", size = 2062846, upload-time = "2025-02-08T21:24:23.171Z" }, - { url = "https://files.pythonhosted.org/packages/21/91/61834396748a741021716751a786312b8a8319715e6c61421447a07c887c/jsonschema_rs-0.29.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e91defda5dfa87306543ee9b34d97553d9422c134998c0b64855b381f8b531d", size = 2065564, upload-time = "2025-02-08T21:24:24.574Z" }, - { url = "https://files.pythonhosted.org/packages/f0/2c/920d92e88b9bdb6cb14867a55e5572e7b78bfc8554f9c625caa516aa13dd/jsonschema_rs-0.29.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96f87680a6a1c16000c851d3578534ae3c154da894026c2a09a50f727bd623d4", size = 2083055, upload-time = "2025-02-08T21:24:26.834Z" }, - { url = 
"https://files.pythonhosted.org/packages/6d/0a/f4c1bea3193992fe4ff9ce330c6a594481caece06b1b67d30b15992bbf54/jsonschema_rs-0.29.1-cp312-cp312-win32.whl", hash = "sha256:bcfc0d52ecca6c1b2fbeede65c1ad1545de633045d42ad0c6699039f28b5fb71", size = 1701065, upload-time = "2025-02-08T21:24:28.282Z" }, - { url = "https://files.pythonhosted.org/packages/5e/89/3f89de071920208c0eb64b827a878d2e587f6a3431b58c02f63c3468b76e/jsonschema_rs-0.29.1-cp312-cp312-win_amd64.whl", hash = "sha256:a414c162d687ee19171e2d8aae821f396d2f84a966fd5c5c757bd47df0954452", size = 1871774, upload-time = "2025-02-08T21:24:30.824Z" }, - { url = "https://files.pythonhosted.org/packages/1b/9b/d642024e8b39753b789598363fd5998eb3053b52755a5df6a021d53741d5/jsonschema_rs-0.29.1-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0afee5f31a940dec350a33549ec03f2d1eda2da3049a15cd951a266a57ef97ee", size = 3824864, upload-time = "2025-02-08T21:24:32.252Z" }, - { url = "https://files.pythonhosted.org/packages/aa/3d/48a7baa2373b941e89a12e720dae123fd0a663c28c4e82213a29c89a4715/jsonschema_rs-0.29.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:c38453a5718bcf2ad1b0163d128814c12829c45f958f9407c69009d8b94a1232", size = 1966084, upload-time = "2025-02-08T21:24:33.8Z" }, - { url = "https://files.pythonhosted.org/packages/1e/e4/f260917a17bb28bb1dec6fa5e869223341fac2c92053aa9bd23c1caaefa0/jsonschema_rs-0.29.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5dc8bdb1067bf4f6d2f80001a636202dc2cea027b8579f1658ce8e736b06557f", size = 2062430, upload-time = "2025-02-08T21:24:35.174Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e7/61353403b76768601d802afa5b7b5902d52c33d1dd0f3159aafa47463634/jsonschema_rs-0.29.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bcfe23992623a540169d0845ea8678209aa2fe7179941dc7c512efc0c2b6b46", size = 2065443, upload-time = "2025-02-08T21:24:36.778Z" }, - { url = "https://files.pythonhosted.org/packages/40/ed/40b971a09f46a22aa956071ea159413046e9d5fcd280a5910da058acdeb2/jsonschema_rs-0.29.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f2a526c0deacd588864d3400a0997421dffef6fe1df5cfda4513a453c01ad42", size = 2082606, upload-time = "2025-02-08T21:24:38.388Z" }, - { url = "https://files.pythonhosted.org/packages/bc/59/1c142e1bfb87d57c18fb189149f7aa8edf751725d238d787015278b07600/jsonschema_rs-0.29.1-cp313-cp313-win32.whl", hash = "sha256:68acaefb54f921243552d15cfee3734d222125584243ca438de4444c5654a8a3", size = 1700666, upload-time = "2025-02-08T21:24:40.573Z" }, - { url = "https://files.pythonhosted.org/packages/13/e8/f0ad941286cd350b879dd2b3c848deecd27f0b3fbc0ff44f2809ad59718d/jsonschema_rs-0.29.1-cp313-cp313-win_amd64.whl", hash = "sha256:1c4e5a61ac760a2fc3856a129cc84aa6f8fba7b9bc07b19fe4101050a8ecc33c", size = 1871619, upload-time = "2025-02-08T21:24:42.286Z" }, + { url = "https://files.pythonhosted.org/packages/47/59/57efa11b8a7069687c7d741849a75092cbb4a6bdce30d52a2832a168c3c5/jsonschema_rs-0.44.1-cp310-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:6f8be6467ee403e126e4e0abb68f13cfbf7199db54d5a4c0f2a1b00e1304f2e3", size = 7365683, upload-time = "2026-03-03T19:07:34.512Z" }, + { url = "https://files.pythonhosted.org/packages/02/39/b1ec92bd383d9e8e0cd70f019f0c047313e4980a3f7e653cfb3270a84310/jsonschema_rs-0.44.1-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:95434b4858da6feb4b3769c955b78204dbc90988941e9e848596ab93c6005d00", size = 3828559, 
upload-time = "2026-03-03T19:07:36.965Z" }, + { url = "https://files.pythonhosted.org/packages/39/97/0b581ce2ca6b6ca3f29cea189609c893aa3c033356a7cb6950cb7559bdc0/jsonschema_rs-0.44.1-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0329af23e7674d88c3117b55c89a0c36e06ee359e696be16796a29c8b1c33e85", size = 3572164, upload-time = "2026-03-03T19:07:38.651Z" }, + { url = "https://files.pythonhosted.org/packages/35/a9/6d750088795947a5366cdfa6b9064680a3b0a86f61806521beb35d88c8fb/jsonschema_rs-0.44.1-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8078c834c3cea6303796fc4925bb8646d1f68313bd54f6d3dde08c8b8eb74bc1", size = 3926333, upload-time = "2026-03-03T19:07:40.369Z" }, + { url = "https://files.pythonhosted.org/packages/a8/19/6475da01b4e81c0445698290a7b8f237e678a0dc9fbf55df663243597b70/jsonschema_rs-0.44.1-cp310-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:502af60c802cf149185ea01edbd31a143b09aaf06b27b6422f8b8893984b1998", size = 3589764, upload-time = "2026-03-03T19:07:42.113Z" }, + { url = "https://files.pythonhosted.org/packages/fe/43/dd8d1a8dcd3dd44e7242944433d86433540ed71a5906d0d75b5dd4fb3352/jsonschema_rs-0.44.1-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f2760c4791ecc3c7e6196cec7e7dbf191205e36dd050119cfab421e108e8508", size = 3782136, upload-time = "2026-03-03T19:07:44.505Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/8ada7636eb2119482fecc6289c3115b27cb045384896e45b8bd0fec98d5b/jsonschema_rs-0.44.1-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:16d663e6c4838e4d594bd9d10c5939a6737c171d9c8600659fe6612098863d3d", size = 4151840, upload-time = "2026-03-03T19:07:46.754Z" }, + { url = "https://files.pythonhosted.org/packages/bf/7e/f163531f203fa4e11871a40a04dc280a94a2b88c2eaa32db7c71cb64b5b4/jsonschema_rs-0.44.1-cp310-abi3-win32.whl", hash = "sha256:cbec5ef1a0cc327cbc829f44a9c76778881003ada99c871a14438c7e8b264e76", size = 3197538, upload-time = "2026-03-03T19:07:48.12Z" }, + { url = "https://files.pythonhosted.org/packages/73/4b/c080db0f50b7a320c80991f3cc9069d865f6a8baadd2952fda7473cf3816/jsonschema_rs-0.44.1-cp310-abi3-win_amd64.whl", hash = "sha256:cee075749f0479599586b4f591940418e45eae65485ed29e84763a28ec9dd40c", size = 3748176, upload-time = "2026-03-03T19:07:49.698Z" }, + { url = "https://files.pythonhosted.org/packages/ed/9f/2f602bf9d3958866f03732abefc51f8bc6caa0f8ea913b8f0ac01923e886/jsonschema_rs-0.44.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:99c0c3e4a786d1e9c25dbd58cc9781f3c3d25c9fbd76310a350de55315f05948", size = 3817433, upload-time = "2026-03-03T19:07:51.161Z" }, + { url = "https://files.pythonhosted.org/packages/42/cf/d899a52ca5fd7846614e15a230845d19070eec865b0791108b14341ef39e/jsonschema_rs-0.44.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:516bfb8926de7d396e4bc9a1c5085870de0035e8e2324014251d091a55a03623", size = 3570909, upload-time = "2026-03-03T19:07:53.117Z" }, + { url = "https://files.pythonhosted.org/packages/53/bb/cc3fda5594cdc3626e479f868f28b5a1d9091296e764ca041d2580d0a292/jsonschema_rs-0.44.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:225074845f6a67e8e3ac18311f87a0ab925ae5adf16466be61c7d1df01eca20a", size = 3920084, upload-time = "2026-03-03T19:07:54.827Z" }, + { url = "https://files.pythonhosted.org/packages/bc/75/49e09ce6b72f8d25813842d9184678d6be92f0a3e90f0276a995c5712986/jsonschema_rs-0.44.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:782d01412e77c83bb376d31aac8afbd06b97e3594f09d1e0304ad22c2382077b", size = 3584852, 
upload-time = "2026-03-03T19:07:56.508Z" }, + { url = "https://files.pythonhosted.org/packages/51/67/4e52d1ab98c8656a66ca1b0422af18da5a5525d6aa23c57be455bdcc6515/jsonschema_rs-0.44.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2afe720dfa1f93235b78e812937039537b63bf4eab6ca3c9ecb7fd7ba08a865d", size = 3776400, upload-time = "2026-03-03T19:07:58.392Z" }, + { url = "https://files.pythonhosted.org/packages/d8/44/5cd424c80df74ad159aceaf59071f26a7b7decf925a952c8929c2e097375/jsonschema_rs-0.44.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:548a1f466ce5b904c9cc52eee8f887c3838377ed95f4525d0ee5896a321e89d5", size = 4144701, upload-time = "2026-03-03T19:08:00.476Z" }, + { url = "https://files.pythonhosted.org/packages/6f/4f/4f8c9a423b2f539b22f0fc314063b724da82df3116a57149c2d730943150/jsonschema_rs-0.44.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8a758e422c4ec265e64f2232409ddc5976b28e94e84a8e5565a2bce169ab72e9", size = 3739509, upload-time = "2026-03-03T19:08:01.97Z" }, + { url = "https://files.pythonhosted.org/packages/76/85/759020a30874df8053c2abf91ed8abe8f27e69e683ed1d94ac2bbf92e7a8/jsonschema_rs-0.44.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ca8ddd724b73678f5f3d3d8f948ae40fa817ad9edd5ce4e732ae26cb0f9dd300", size = 3816826, upload-time = "2026-03-03T19:08:03.411Z" }, + { url = "https://files.pythonhosted.org/packages/45/77/47720717c3008483ff54365f65dbfb264d6dd3b3e7a2367d7f4f0a0a76e4/jsonschema_rs-0.44.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1ff6c9868c8f2834952efa0555fd82d0ab19664ba6b17f481330c64f7af7177d", size = 3569065, upload-time = "2026-03-03T19:08:05.305Z" }, + { url = "https://files.pythonhosted.org/packages/06/88/734eb132228c09b2e0a442da6686efe8bd0c8f0095b13d52b46dcacea735/jsonschema_rs-0.44.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec883313f3782f1c0ffc58ceda55136e26967198523b9cd111af782e273659a3", size = 3918339, upload-time = "2026-03-03T19:08:07.004Z" }, + { url = "https://files.pythonhosted.org/packages/01/9c/6c4ca6c6bc906e4d74425d48c7fd49e558ec4ec98fb792d549c3ed95a632/jsonschema_rs-0.44.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:f971acf2910e64f0960080db6b6c73df483318d9db992273885f596cc3a9a5d9", size = 3583541, upload-time = "2026-03-03T19:08:08.683Z" }, + { url = "https://files.pythonhosted.org/packages/14/13/f907c17fc0de4d653cac237846303a164ae58d26656d4161ad4c13d5267a/jsonschema_rs-0.44.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:50f5c28fd54236e43f392041f06132b0e9f09dd261cb00236045078d98e3cf84", size = 3774990, upload-time = "2026-03-03T19:08:10.278Z" }, + { url = "https://files.pythonhosted.org/packages/83/1c/141b5b43db5aeac7d54cf3022cfa6a2941fe4d550afacfcf7bbcd49e66fa/jsonschema_rs-0.44.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbc59d68f38a377117b84b8109af269813a39b4b961e803876767e4fab6bac98", size = 4143282, upload-time = "2026-03-03T19:08:12.229Z" }, + { url = "https://files.pythonhosted.org/packages/79/8a/6d4d55583e97d37ca7ac5595d978a83ecfbf8c113ebe31496f7330c72a49/jsonschema_rs-0.44.1-cp314-cp314t-win_amd64.whl", hash = "sha256:049203fd4876f2ec96191c0f8befabf33289988c57e4f191b5fd5974de1fb07f", size = 3738147, upload-time = "2026-03-03T19:08:13.58Z" }, ] [[package]] @@ -1540,38 +1697,39 @@ wheels = [ [[package]] name = "langchain" -version = "1.2.3" +version = "1.2.15" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "langgraph" }, { name = "pydantic" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/5f/78/9565319259d92818d96f30d55507ee1072fbf5c008b95a6acecf5e47c4d6/langchain-1.2.3.tar.gz", hash = "sha256:9d6171f9c3c760ca3c7c2cf8518e6f8625380962c488b41e35ebff1f1d611077", size = 548296, upload-time = "2026-01-08T20:26:30.149Z" } +sdist = { url = "https://files.pythonhosted.org/packages/98/3f/888a7099d2bd2917f8b0c3ffc7e347f1e664cf64267820b0b923c4f339fc/langchain-1.2.15.tar.gz", hash = "sha256:1717b6719daefae90b2728314a5e2a117ff916291e2862595b6c3d6fba33d652", size = 574732, upload-time = "2026-04-03T14:26:03.994Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/e5/9b4f58533f8ce3013b1a993289eb11e8607d9c9d9d14699b29c6ac3b4132/langchain-1.2.3-py3-none-any.whl", hash = "sha256:5cdc7c80f672962b030c4b0d16d0d8f26d849c0ada63a4b8653a20d7505512ae", size = 106428, upload-time = "2026-01-08T20:26:29.162Z" }, + { url = "https://files.pythonhosted.org/packages/3f/e8/a3b8cb0005553f6a876865073c81ef93bd7c5b18381bcb9ba4013af96ebc/langchain-1.2.15-py3-none-any.whl", hash = "sha256:e349db349cb3e9550c4044077cf90a1717691756cc236438404b23500e615874", size = 112714, upload-time = "2026-04-03T14:26:02.557Z" }, ] [[package]] name = "langchain-anthropic" -version = "1.3.4" +version = "1.4.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anthropic" }, { name = "langchain-core" }, { name = "pydantic" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/30/4e/7c1ffac126f5e62b0b9066f331f91ae69361e73476fd3ca1b19f8d8a3cc3/langchain_anthropic-1.3.4.tar.gz", hash = "sha256:000ed4c2d6fb8842b4ffeed22a74a3e84f9e9bcb63638e4abbb4a1d8ffa07211", size = 671858, upload-time = "2026-02-24T13:54:01.738Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/19/55e0d3548a4d85ccee630fcfc0979af54ff4ea4f39ab0ed1ed3c6bf25f4e/langchain_anthropic-1.4.1.tar.gz", hash = "sha256:e17d027091438620e35ff2f06aefdfd63c8dcdf6abc606009ddfe3c764f2bc2e", size = 676043, upload-time = "2026-04-17T14:26:17.31Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/cf/b7c7b7270efbb3db2edbf14b09ba9110a41628f3a85a11cae9527a35641c/langchain_anthropic-1.3.4-py3-none-any.whl", hash = "sha256:cd112dcc8049aef09f58b3c4338b2c9db5ee98105e08664954a4e40d8bf120b9", size = 47454, upload-time = "2026-02-24T13:54:00.53Z" }, + { url = "https://files.pythonhosted.org/packages/61/0a/20625dfea38a26e8b43654e18cafbc3595e7dc223da80f23d4fa8451dc1d/langchain_anthropic-1.4.1-py3-none-any.whl", hash = "sha256:5a48afbb2b1bad9c46badaccc8e23b0dd7ae07b7583f76bac21ddb3dac831efd", size = 49020, upload-time = "2026-04-17T14:26:15.989Z" }, ] [[package]] name = "langchain-core" -version = "1.2.28" +version = "1.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jsonpatch" }, + { name = "langchain-protocol" }, { name = "langsmith" }, { name = "packaging" }, { name = "pydantic" }, @@ -1580,9 +1738,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "uuid-utils" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/a4/317a1a3ac1df33a64adb3670bf88bbe3b3d5baa274db6863a979db472897/langchain_core-1.2.28.tar.gz", hash = "sha256:271a3d8bd618f795fdeba112b0753980457fc90537c46a0c11998516a74dc2cb", size = 846119, upload-time = "2026-04-08T18:19:34.867Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/03/7219502e8ca728d65eb44d7a3eb60239230742a70dbfc9241b9bfd61c4ab/langchain_core-1.3.2.tar.gz", hash = "sha256:fd7a50b2f28ba561fd9d7f5d2760bc9e06cf00cdf820a3ccafe88a94ffa8d5b7", size = 911813, upload-time = 
"2026-04-24T15:49:23.699Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/92/32f785f077c7e898da97064f113c73fbd9ad55d1e2169cf3a391b183dedb/langchain_core-1.2.28-py3-none-any.whl", hash = "sha256:80764232581eaf8057bcefa71dbf8adc1f6a28d257ebd8b95ba9b8b452e8c6ac", size = 508727, upload-time = "2026-04-08T18:19:32.823Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d5/8fa4431007cbb7cfed7590f4d6a5dea3ad724f4174d248f6642ef5ce7d05/langchain_core-1.3.2-py3-none-any.whl", hash = "sha256:d44a66127f9f8db735bdfd0ab9661bccb47a97113cfd3f2d89c74864422b7274", size = 542390, upload-time = "2026-04-24T15:49:21.991Z" }, ] [[package]] @@ -1600,7 +1758,7 @@ wheels = [ [[package]] name = "langchain-google-genai" -version = "4.2.1" +version = "4.2.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filetype" }, @@ -1608,23 +1766,23 @@ dependencies = [ { name = "langchain-core" }, { name = "pydantic" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/14/63/e7d148f903cebfef50109da71378f411166f068d66f79b9e16a62dbacf41/langchain_google_genai-4.2.1.tar.gz", hash = "sha256:7f44487a0337535897e3bba9a1d6605d722629e034f757ffa8755af0aa85daa8", size = 278288, upload-time = "2026-02-19T19:29:19.416Z" } +sdist = { url = "https://files.pythonhosted.org/packages/29/78/dfe068937338727b0dee637d971d59fe2fa275f9d0f0edee3fa80e811846/langchain_google_genai-4.2.2.tar.gz", hash = "sha256:5fc774bf41d1dc1c1a5ba8d7b9f2017dfa77e30653c9b44d2dfbaf0e877e7388", size = 267457, upload-time = "2026-04-15T15:08:32.18Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/7e/46c5973bd8b10a5c4c8a77136cf536e658796380a17c740246074901b038/langchain_google_genai-4.2.1-py3-none-any.whl", hash = "sha256:a7735289cf94ca3a684d830e09196aac8f6e75e647e3a0a1c3c9dc534ceb985e", size = 66500, upload-time = "2026-02-19T19:29:18.002Z" }, + { url = "https://files.pythonhosted.org/packages/3c/5c/adf81d68ab89b4cf505e690f8c1956d11b5969c831c951c7b4b1b1818080/langchain_google_genai-4.2.2-py3-none-any.whl", hash = "sha256:c8d09aac0304d26f1c2483e41a350f15587af1fbe034c39a304e1e17a3b743f3", size = 67605, upload-time = "2026-04-15T15:08:31.346Z" }, ] [[package]] name = "langchain-mcp-adapters" -version = "0.2.1" +version = "0.2.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "mcp" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d9/52/cebf0ef5b1acef6cbc63d671171d43af70f12d19f55577909c7afa79fb6e/langchain_mcp_adapters-0.2.1.tar.gz", hash = "sha256:58e64c44e8df29ca7eb3b656cf8c9931ef64386534d7ca261982e3bdc63f3176", size = 36394, upload-time = "2025-12-09T16:28:38.98Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0e/66/1cc7039e2daaddcdea9d8887851fe6eb67401925999b2aa394aa855c7132/langchain_mcp_adapters-0.2.2.tar.gz", hash = "sha256:12d39e91ae4389c54b61b221094e53850b6e152934d8bc10c80665d600e76530", size = 37942, upload-time = "2026-03-16T17:13:30.35Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/03/81/b2479eb26861ab36be851026d004b2d391d789b7856e44c272b12828ece0/langchain_mcp_adapters-0.2.1-py3-none-any.whl", hash = "sha256:9f96ad4c64230f6757297fec06fde19d772c99dbdfbca987f7b7cfd51ff77240", size = 22708, upload-time = "2025-12-09T16:28:37.877Z" }, + { url = "https://files.pythonhosted.org/packages/7d/2f/15d5e6c1765d8404a9cce38d8c81d7b33fb3392f9db5b992c000dddbd2a3/langchain_mcp_adapters-0.2.2-py3-none-any.whl", hash = 
"sha256:d08e64954e86281002653071b7430e0377c9a577cb4ac3143abfeb3e24ef8797", size = 23288, upload-time = "2026-03-16T17:13:29.073Z" }, ] [[package]] @@ -1642,21 +1800,33 @@ wheels = [ [[package]] name = "langchain-openai" -version = "1.1.7" +version = "1.2.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "openai" }, { name = "tiktoken" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/b7/30bfc4d1b658a9ee524bcce3b0b2ec9c45a11c853a13c4f0c9da9882784b/langchain_openai-1.1.7.tar.gz", hash = "sha256:f5ec31961ed24777548b63a5fe313548bc6e0eb9730d6552b8c6418765254c81", size = 1039134, upload-time = "2026-01-07T19:44:59.728Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/0e/d8e16c28aa67106d285e63b8ffc04c5af68341e345ce24a0751dbf2e167e/langchain_openai-1.2.1.tar.gz", hash = "sha256:ee4480b787706361b7125fad46930589a624df87aa158c6986ef1fad10d10675", size = 1146092, upload-time = "2026-04-24T19:46:43.328Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/a1/50e7596aca775d8c3883eceeaf47489fac26c57c1abe243c00174f715a8a/langchain_openai-1.1.7-py3-none-any.whl", hash = "sha256:34e9cd686aac1a120d6472804422792bf8080a2103b5d21ee450c9e42d053815", size = 84753, upload-time = "2026-01-07T19:44:58.629Z" }, + { url = "https://files.pythonhosted.org/packages/dc/55/2865b18ee3a3dd11160b8c4b2cf37e75bf2a4a8d1d38868ffffc7b7cc180/langchain_openai-1.2.1-py3-none-any.whl", hash = "sha256:a80732185030d4f453dda6c25feef46f645f665423fdffe38ae3edf1ac3c6c4d", size = 98626, upload-time = "2026-04-24T19:46:41.971Z" }, +] + +[[package]] +name = "langchain-protocol" +version = "0.0.12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5c/51/1157009b6f94e6e58be58fa8b620187d657909a8b36a6bf5b0c52a2711f6/langchain_protocol-0.0.12.tar.gz", hash = "sha256:5e14c434290a705c9510fdb1a83ecf7561a5e6e0dfd053930ade80dba069269f", size = 6408, upload-time = "2026-04-25T01:05:01.489Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/82/3431e3061c917439589fa88a6b23c9bc0e154cba0f05d2e895a68c76ff74/langchain_protocol-0.0.12-py3-none-any.whl", hash = "sha256:402b61f42d4139692528cf37226c367bb6efc8ff8165b29380accb0abfece7b2", size = 6639, upload-time = "2026-04-25T01:05:00.487Z" }, ] [[package]] name = "langfuse" -version = "4.0.5" +version = "4.5.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "backoff" }, @@ -1668,14 +1838,14 @@ dependencies = [ { name = "pydantic" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f9/de/b319a127e231e6ac10fad7a75e040b0c961669d9aa1f372f131d48ee4835/langfuse-4.0.5.tar.gz", hash = "sha256:f07fc88526d0699b3696df6ff606bc3c509c86419b5f551dea3d95ed31b4b7f8", size = 273892, upload-time = "2026-04-01T11:05:48.135Z" } +sdist = { url = "https://files.pythonhosted.org/packages/48/bd/9b12c9dd3ae1883619b20daa6d60f20a780ce2d25564d9b2168db27cbeb0/langfuse-4.5.1.tar.gz", hash = "sha256:fe8f9219f4101c0921934b0aeb1b45834f8e7d248e5f830b2c89c5b40aea6d83", size = 279735, upload-time = "2026-04-24T15:21:43.976Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/92/b4699c9ce5f2e1ab04e7fc1c656cc14a522f10f2c7170d6e427013ce0d37/langfuse-4.0.5-py3-none-any.whl", hash = "sha256:48ef89fec839b40f0f0e68b26c160e7bc0178cf10c8e53932895f4aed428b4df", size = 472730, upload-time = "2026-04-01T11:05:46.948Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/63/77bd7220dfd60885a272a851f780b3f83e0f653ee3a852347552c3e24a28/langfuse-4.5.1-py3-none-any.whl", hash = "sha256:5923cafe8289c9e3c53cb6992f4b46ec3132473b9f9eb65eb33ad28e2682db81", size = 479527, upload-time = "2026-04-24T15:21:45.568Z" }, ] [[package]] name = "langgraph" -version = "1.0.9" +version = "1.1.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, @@ -1685,14 +1855,14 @@ dependencies = [ { name = "pydantic" }, { name = "xxhash" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dc/63/69373a6721f30026ffa462a62084b11ed4bb5a201d1672366e13a89532f3/langgraph-1.0.9.tar.gz", hash = "sha256:feac2729faba7d3c325bef76f240d7d7f66b02d2cbf4fdb1ed7d0cc83f963651", size = 502800, upload-time = "2026-02-19T18:19:45.228Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/d5/9d9c65d5500a1ca7ea63d6d65aecfb248037018a74d7d4ef52e276bb4e4b/langgraph-1.1.9.tar.gz", hash = "sha256:bc5a49d5a5e71fda1f9c53c06c62f4caec9a95545b739d130a58b6ab3269e274", size = 560717, upload-time = "2026-04-21T13:43:06.809Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/23/a2/562a6c2430085c2c29b23c1e1d12233bf41a64e9a9832eda7573af3666cf/langgraph-1.0.9-py3-none-any.whl", hash = "sha256:bce0d1f3e9a20434215a2a818395a58aedfc11c87bd6b52706c0db5c05ec44ec", size = 158150, upload-time = "2026-02-19T18:19:43.913Z" }, + { url = "https://files.pythonhosted.org/packages/16/58/0380420e66619d12c992c1f8cfda0c7a04e8f0fe8a84752245b9e7b1cba7/langgraph-1.1.9-py3-none-any.whl", hash = "sha256:7db13ceecde4ea643df6c097dcc9e534895dcd9fcc6500eeff2f2cde0fab16b2", size = 173744, upload-time = "2026-04-21T13:43:05.513Z" }, ] [[package]] name = "langgraph-api" -version = "0.7.65" +version = "0.8.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cloudpickle" }, @@ -1700,6 +1870,7 @@ dependencies = [ { name = "grpcio" }, { name = "grpcio-health-checking" }, { name = "grpcio-tools" }, + { name = "httptools", marker = "sys_platform != 'win32'" }, { name = "httpx" }, { name = "jsonschema-rs" }, { name = "langchain-core" }, @@ -1721,24 +1892,41 @@ dependencies = [ { name = "truststore" }, { name = "uuid-utils" }, { name = "uvicorn" }, + { name = "uvloop", marker = "sys_platform != 'win32'" }, { name = "watchfiles" }, + { name = "zstandard" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/36/6d751d6f7becea1008ab963bc2f89a4c76dd2ae707399162b6950bfa7d4c/langgraph_api-0.7.65.tar.gz", hash = "sha256:c7d49b87d60ef2e07ae1582ac62a601720a51be637a89740d2593221dcda6da0", size = 625227, upload-time = "2026-03-05T02:28:50.185Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/7a/a355ceb2d9eb804d9a0e74f3620a4578a397d00f12d748e4d876a105ea11/langgraph_api-0.8.1.tar.gz", hash = "sha256:51249d71c29d153c8e64aa1f3ff2717d8364d8d4ee500461a1b2fb7974f72237", size = 671799, upload-time = "2026-04-23T20:01:38.598Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/46/b0/afd71dc70177dedbc21579b148a76f67c5c4606ed31dcc6c78d67be58c18/langgraph_api-0.7.65-py3-none-any.whl", hash = "sha256:f32f39cb9ebe58152d9f2fa06541dbb296bf238b85d4fb9811f7a8549d2701be", size = 528075, upload-time = "2026-03-05T02:28:48.425Z" }, + { url = "https://files.pythonhosted.org/packages/d8/fe/7d5d68ce387902f752432e68ec34e60da93232b2c00cb5fa7464da03d16f/langgraph_api-0.8.1-py3-none-any.whl", hash = "sha256:36f5cba9db7b2db1d8571298ef741622aa0423443e4cff000dd04a38cb2ef769", size = 558980, upload-time = 
"2026-04-23T20:01:36.905Z" }, ] [[package]] name = "langgraph-checkpoint" -version = "4.0.0" +version = "4.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "ormsgpack" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/98/76/55a18c59dedf39688d72c4b06af73a5e3ea0d1a01bc867b88fbf0659f203/langgraph_checkpoint-4.0.0.tar.gz", hash = "sha256:814d1bd050fac029476558d8e68d87bce9009a0262d04a2c14b918255954a624", size = 137320, upload-time = "2026-01-12T20:30:26.38Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/f2/cf8086e1f1a3358d9228805614e72602c281b18307f3fae64a5b854aad2d/langgraph_checkpoint-4.0.2.tar.gz", hash = "sha256:4f6f99cba8e272deabf81b2d8cdc96582af07a57a6ad591cdf216bb310497039", size = 160810, upload-time = "2026-04-15T21:03:00.062Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/de/ddd53b7032e623f3c7bcdab2b44e8bf635e468f62e10e5ff1946f62c9356/langgraph_checkpoint-4.0.0-py3-none-any.whl", hash = "sha256:3fa9b2635a7c5ac28b338f631abf6a030c3b508b7b9ce17c22611513b589c784", size = 46329, upload-time = "2026-01-12T20:30:25.2Z" }, + { url = "https://files.pythonhosted.org/packages/b9/5a/6dba29dd89b0a46ae21c707da0f9d17e94f27d3e481ed15bc99d6bd20aa6/langgraph_checkpoint-4.0.2-py3-none-any.whl", hash = "sha256:59b0f29216128a629c58dd07c98aa004f82f51805d5573126ffb419b753ff253", size = 51000, upload-time = "2026-04-15T21:02:59.096Z" }, +] + +[[package]] +name = "langgraph-checkpoint-postgres" +version = "3.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langgraph-checkpoint" }, + { name = "orjson" }, + { name = "psycopg" }, + { name = "psycopg-pool" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/95/7a/8f439966643d32111248a225e6cb33a182d07c90de780c4dbfc1e0377832/langgraph_checkpoint_postgres-3.0.5.tar.gz", hash = "sha256:a8fd7278a63f4f849b5cbc7884a15ca8f41e7d5f7467d0a66b31e8c24492f7eb", size = 127856, upload-time = "2026-03-18T21:25:29.785Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/87/b0f98b33a67204bca9d5619bcd9574222f6b025cf3c125eedcec9a50ecbc/langgraph_checkpoint_postgres-3.0.5-py3-none-any.whl", hash = "sha256:86d7040a88fd70087eaafb72251d796696a0a2d856168f5c11ef620771411552", size = 42907, upload-time = "2026-03-18T21:25:28.75Z" }, ] [[package]] @@ -1757,33 +1945,36 @@ wheels = [ [[package]] name = "langgraph-cli" -version = "0.4.14" +version = "0.4.24" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, + { name = "httpx" }, { name = "langgraph-sdk" }, + { name = "pathspec" }, + { name = "python-dotenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/6a/c9e73635933b722e4fca31fbf49dbe23eb06efde459b3aabf5a2a6d192e5/langgraph_cli-0.4.14.tar.gz", hash = "sha256:ba6bc715651d85ba94d14d6c53db87b4bf478cf45d61a28af9c8dee629f3cf1f", size = 857457, upload-time = "2026-03-02T21:27:19.189Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/77/34ebed84736dacbf164617794c15cd9271c18773cf32eeb7086c8b7b6dfd/langgraph_cli-0.4.24.tar.gz", hash = "sha256:8f05f0aec38a5da3cb0e7250123530e83c0179d74be0021050bc5cd36ac0dafb", size = 1027613, upload-time = "2026-04-22T18:49:30.921Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/29/25fb20e2f2f5733edd52a5c7735a397a71763bdda05a74d1398221a49a41/langgraph_cli-0.4.14-py3-none-any.whl", hash = "sha256:42ad7e56b512a3c260b205f7623d2e2cc0b245463961810fcc44482200374b4b", size = 42116, upload-time = 
"2026-03-02T21:27:18.042Z" }, + { url = "https://files.pythonhosted.org/packages/12/89/c5b09ad2dffb411987529f32e81fe318ccef3c2fdff2442e7c25b05b108c/langgraph_cli-0.4.24-py3-none-any.whl", hash = "sha256:aaf4dbecd752391c1489864da3a8e0af08e6bb0684d6516007617ce0abe9404d", size = 75486, upload-time = "2026-04-22T18:49:29.888Z" }, ] [[package]] name = "langgraph-prebuilt" -version = "1.0.8" +version = "1.0.11" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "langgraph-checkpoint" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0d/06/dd61a5c2dce009d1b03b1d56f2a85b3127659fdddf5b3be5d8f1d60820fb/langgraph_prebuilt-1.0.8.tar.gz", hash = "sha256:0cd3cf5473ced8a6cd687cc5294e08d3de57529d8dd14fdc6ae4899549efcf69", size = 164442, upload-time = "2026-02-19T18:14:39.083Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/bb/0e0b3eb33b1f2f32f8810a49aa24b7d11a5b0ed45f679386095946a59557/langgraph_prebuilt-1.0.11.tar.gz", hash = "sha256:0e71545f706a134b6a80a2a56916562797b499e3e4ab6eed5ce89396ac03d322", size = 171759, upload-time = "2026-04-24T18:18:34.528Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/41/ec966424ad3f2ed3996d24079d3342c8cd6c0bd0653c12b2a917a685ec6c/langgraph_prebuilt-1.0.8-py3-none-any.whl", hash = "sha256:d16a731e591ba4470f3e313a319c7eee7dbc40895bcf15c821f985a3522a7ce0", size = 35648, upload-time = "2026-02-19T18:14:37.611Z" }, + { url = "https://files.pythonhosted.org/packages/f6/8c/f4c574cb75ae9b8a474215d03a029ea723c919f65771ca1c82fe532d0297/langgraph_prebuilt-1.0.11-py3-none-any.whl", hash = "sha256:7afbaf5d64959e452976664c75bb8ec24098d3510cf9c205919baf443e7342ec", size = 36832, upload-time = "2026-04-24T18:18:33.586Z" }, ] [[package]] name = "langgraph-runtime-inmem" -version = "0.26.0" +version = "0.28.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "blockbuster" }, @@ -1794,27 +1985,27 @@ dependencies = [ { name = "starlette" }, { name = "structlog" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/67/01/3a6265341cf4d7f6550543ec6917d8eb4a0cfba5fbe2669497a631762304/langgraph_runtime_inmem-0.26.0.tar.gz", hash = "sha256:b9c587d1339320a2a54a570a21aecaf59eebc4be07cef1d8a5b035f3f2c61d6a", size = 110402, upload-time = "2026-02-24T00:22:31.938Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/49/daf02ad6b9accd858a0e165d8beb94e5378fad12c17b29f8228d7a596be0/langgraph_runtime_inmem-0.28.0.tar.gz", hash = "sha256:e02536508b5c154f18a24240663af7c2389f43cb4fbbac995a2717b4cfe40d2f", size = 115393, upload-time = "2026-04-23T19:49:38.872Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/21/b2/69cb16fc83b94449ee6e80c73b8c522da59ef1f52714e06c3cff1e27eab8/langgraph_runtime_inmem-0.26.0-py3-none-any.whl", hash = "sha256:cd91fd9783be5aecb99888fd3aa0d6677ea74d973fbb306acd566c9323cbc7c1", size = 44241, upload-time = "2026-02-24T00:22:30.815Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b1/b600e38260da97f2d71a041b2730f97f51bb2ce62a98230a7fac5fc3ca02/langgraph_runtime_inmem-0.28.0-py3-none-any.whl", hash = "sha256:26adc3bf115eba0b52532d18a13a297631454bc9f2a0e1f9b77060583ba81fdf", size = 47625, upload-time = "2026-04-23T19:49:37.869Z" }, ] [[package]] name = "langgraph-sdk" -version = "0.3.9" +version = "0.3.13" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, { name = "orjson" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/3a/bd/ca8ae5c6a34be6d4f7aa86016e010ff96b3a939456041565797952e3014d/langgraph_sdk-0.3.9.tar.gz", hash = "sha256:8be8958529b3f6d493ec248fdb46e539362efda75784654a42a7091d22504e0e", size = 184287, upload-time = "2026-02-24T18:39:03.276Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0e/db/77a45127dddcfea5e4256ba916182903e4c31dc4cfca305b8c386f0a9e53/langgraph_sdk-0.3.13.tar.gz", hash = "sha256:419ca5663eec3cec192ad194ac0647c0c826866b446073eb40f384f950986cd5", size = 196360, upload-time = "2026-04-07T20:34:18.766Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/4c/7a7510260fbda788efd13bf4650d3e7d80988118441ac811ec78e0aa03ac/langgraph_sdk-0.3.9-py3-none-any.whl", hash = "sha256:94654294250c920789b6ed0d8a70c0117fed5736b61efc24ff647157359453c5", size = 90511, upload-time = "2026-02-24T18:39:02.012Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ef/64d64e9f8eea47ce7b939aa6da6863b674c8d418647813c20111645fcc62/langgraph_sdk-0.3.13-py3-none-any.whl", hash = "sha256:aee09e345c90775f6de9d6f4c7b847cfc652e49055c27a2aed0d981af2af3bd0", size = 96668, upload-time = "2026-04-07T20:34:17.866Z" }, ] [[package]] name = "langsmith" -version = "0.7.31" +version = "0.7.36" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, @@ -1827,9 +2018,9 @@ dependencies = [ { name = "xxhash" }, { name = "zstandard" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e6/11/696019490992db5c87774dc20515529ef42a01e1d770fb754ed6d9b12fb0/langsmith-0.7.31.tar.gz", hash = "sha256:331ee4f7c26bb5be4022b9859b7d7b122cbf8c9d01d9f530114c1914b0349ffb", size = 1178480, upload-time = "2026-04-14T17:55:41.242Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/4c/5f20508000ee0559bfa713b85c431b1cdc95d2913247ff9eb318e7fdff7b/langsmith-0.7.36.tar.gz", hash = "sha256:d18ef34819e0a252cf52c74ce6e9bd5de6deea4f85a3aef50abc9f48d8c5f8b8", size = 4402322, upload-time = "2026-04-24T16:58:06.681Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/a1/a013cf458c301cda86a213dd153ce0a01c93f1ab5833f951e6a44c9763ce/langsmith-0.7.31-py3-none-any.whl", hash = "sha256:0291d49203f6e80dda011af1afda61eb0595a4d697adb684590a8805e1d61fb6", size = 373276, upload-time = "2026-04-14T17:55:39.677Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8d/3ca31ae3a4a437191243ad6d9061ede9367440bb7dc9a0da1ecc2c2a4865/langsmith-0.7.36-py3-none-any.whl", hash = "sha256:e1657a795f3f1982bb8d34c98b143b630ca3eee9de2c10e670c9105233b54654", size = 381808, upload-time = "2026-04-24T16:58:04.572Z" }, ] [package.optional-dependencies] @@ -1841,7 +2032,7 @@ otel = [ [[package]] name = "lark-oapi" -version = "1.5.3" +version = "1.5.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, @@ -1851,7 +2042,7 @@ dependencies = [ { name = "websockets" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/ff/2ece5d735ebfa2af600a53176f2636ae47af2bf934e08effab64f0d1e047/lark_oapi-1.5.3-py3-none-any.whl", hash = "sha256:fda6b32bb38d21b6bdaae94979c600b94c7c521e985adade63a54e4b3e20cc36", size = 6993016, upload-time = "2026-01-27T08:21:49.307Z" }, + { url = "https://files.pythonhosted.org/packages/b4/72/c2e973066da57e9f6720c229364e673d89c884fac65c265a08e2c32eed3c/lark_oapi-1.5.5-py3-none-any.whl", hash = "sha256:c953d3f87e5b43d9e99cdee7c2d962568ac05d5c01ef57ad662fbb5d4ec0e69f", size = 6995394, upload-time = "2026-04-21T04:00:42.216Z" }, ] [[package]] @@ -1952,6 +2143,18 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/7b/9e/f8ee7d644affa3b80efdd623a3d75865c8f058f3950cb87fb0c48e3559bc/magika-0.6.3-py3-none-win_amd64.whl", hash = "sha256:e57f75674447b20cab4db928ae58ab264d7d8582b55183a0b876711c2b2787f3", size = 12692831, upload-time = "2025-10-30T15:22:32.063Z" }, ] +[[package]] +name = "mako" +version = "1.3.11" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/59/8a/805404d0c0b9f3d7a326475ca008db57aea9c5c9f2e1e39ed0faa335571c/mako-1.3.11.tar.gz", hash = "sha256:071eb4ab4c5010443152255d77db7faa6ce5916f35226eb02dc34479b6858069", size = 399811, upload-time = "2026-04-14T20:19:51.493Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/a5/19d7aaa7e433713ffe881df33705925a196afb9532efc8475d26593921a6/mako-1.3.11-py3-none-any.whl", hash = "sha256:e372c6e333cf004aa736a15f425087ec977e1fcbd2966aae7f17c8dc1da27a77", size = 78503, upload-time = "2026-04-14T20:19:53.233Z" }, +] + [[package]] name = "mammoth" version = "1.11.0" @@ -1966,11 +2169,11 @@ wheels = [ [[package]] name = "markdown-to-mrkdwn" -version = "0.3.1" +version = "0.3.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/17/8e/f2c62a88097425b0dba3a8699d13154b4c5888b989ffaf6419c10058b338/markdown_to_mrkdwn-0.3.1.tar.gz", hash = "sha256:25f5c095516f8ad956c88c5dab75493aadfaa02e51e3c84459490058a8ca840b", size = 14191, upload-time = "2026-01-05T14:37:29.276Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/ab/be87702b4ccb9554b4d4fea399c046fb504942a6ddc4e88b2d83f975cf50/markdown_to_mrkdwn-0.3.2.tar.gz", hash = "sha256:70df595bd51020b4bc8fc20692488bb9031b0f70a682a14fe3b593a82bcc8c79", size = 14221, upload-time = "2026-03-10T11:03:31.027Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/52/92/ce0a08fb9769a13be550a7079c3409300ca6eb14ccc9038f67ac44deeef4/markdown_to_mrkdwn-0.3.1-py3-none-any.whl", hash = "sha256:5a6d08f1eaa08aea66953ef0eba206e0bb244d5c62880c76d1e3a11ee46cd3f0", size = 13592, upload-time = "2026-01-05T14:37:28.21Z" }, + { url = "https://files.pythonhosted.org/packages/99/04/31a8d2a1662cdf61f3fcfa6e219bc52f176cb00dfc30e21cef5f869ec905/markdown_to_mrkdwn-0.3.2-py3-none-any.whl", hash = "sha256:50c523594a70ef2891c3871074059539f596041d6eab767335e60ddcdc91e94f", size = 13646, upload-time = "2026-03-10T11:03:29.905Z" }, ] [[package]] @@ -1988,7 +2191,7 @@ wheels = [ [[package]] name = "markitdown" -version = "0.1.5b1" +version = "0.1.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "beautifulsoup4" }, @@ -1996,12 +2199,11 @@ dependencies = [ { name = "defusedxml" }, { name = "magika" }, { name = "markdownify" }, - { name = "onnxruntime", marker = "sys_platform == 'win32'" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/23/ae/c1f5e3fbd63883f27aec6bb6f8af2852253928bc673617bd02f2740a3057/markitdown-0.1.5b1.tar.gz", hash = "sha256:d5bffdb9d7aff9cac423a6d80d9e3a970937cec61338167bd855555f8a1c591c", size = 44408, upload-time = "2026-01-08T23:20:09.242Z" } +sdist = { url = "https://files.pythonhosted.org/packages/83/93/3b93c291c99d09f64f7535ba74c1c6a3507cf49cffd38983a55de6f834b6/markitdown-0.1.5.tar.gz", hash = "sha256:4c956ff1528bf15e1814542035ec96e989206d19d311bb799f4df973ecafc31a", size = 45099, upload-time = "2026-02-20T19:45:23.886Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/3c/10/e3a4e65265c239b15b34199ca340b56b1163ffbc8d0a658a734f7fa2f8b3/markitdown-0.1.5b1-py3-none-any.whl", hash = "sha256:31b667ce9858bc7ff50b7c7aec5fab2c3103d3ca2cb69203b3edabdda5d3a568", size = 62710, upload-time = "2026-01-08T23:20:10.672Z" }, + { url = "https://files.pythonhosted.org/packages/b1/8b/fd7e042455a829a1ede0bc8e9e3061aa6c7c4cf745385526ef62ff1b5a5b/markitdown-0.1.5-py3-none-any.whl", hash = "sha256:5180a9a841e20fc01c2c09dbc5d039638429bbebcdc2af1b2615c3c427840434", size = 63402, upload-time = "2026-02-20T19:45:27.195Z" }, ] [package.optional-dependencies] @@ -2026,9 +2228,72 @@ xlsx = [ { name = "pandas" }, ] +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + [[package]] name = "mcp" -version = "1.25.0" +version = "1.27.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -2046,9 +2311,9 @@ dependencies = [ { name = "typing-inspection" }, { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387, upload-time = "2025-12-19T10:19:56.985Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/eb/c0cfc62075dc6e1ec1c64d352ae09ac051d9334311ed226f1f425312848a/mcp-1.27.0.tar.gz", hash = "sha256:d3dc35a7eec0d458c1da4976a48f982097ddaab87e278c5511d5a4a56e852b83", size = 607509, upload-time = "2026-04-02T14:48:08.88Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/fc/6dc7659c2ae5ddf280477011f4213a74f806862856b796ef08f028e664bf/mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a", size = 233076, upload-time = "2025-12-19T10:19:55.416Z" }, + { url = "https://files.pythonhosted.org/packages/9c/46/f6b4ad632c67ef35209a66127e4bddc95759649dd595f71f13fba11bdf9a/mcp-1.27.0-py3-none-any.whl", hash = "sha256:5ce1fa81614958e267b21fb2aa34e0aea8e2c6ede60d52aba45fd47246b4d741", size = 215967, upload-time = "2026-04-02T14:48:07.24Z" }, ] [[package]] @@ -2062,16 +2327,16 @@ wheels = [ [[package]] name = "msal" -version = "1.34.0" +version = "1.36.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, { name = "pyjwt", extra = ["crypto"] }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cf/0e/c857c46d653e104019a84f22d4494f2119b4fe9f896c92b4b864b3b045cc/msal-1.34.0.tar.gz", hash = "sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f", size = 153961, upload-time = "2025-09-22T23:05:48.989Z" } +sdist = { url = "https://files.pythonhosted.org/packages/de/cb/b02b0f748ac668922364ccb3c3bff5b71628a05f5adfec2ba2a5c3031483/msal-1.36.0.tar.gz", hash = "sha256:3f6a4af2b036b476a4215111c4297b4e6e236ed186cd804faefba23e4990978b", size = 174217, upload-time = "2026-04-09T10:20:33.525Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/c2/dc/18d48843499e278538890dc709e9ee3dea8375f8be8e82682851df1b48b5/msal-1.34.0-py3-none-any.whl", hash = "sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1", size = 116987, upload-time = "2025-09-22T23:05:47.294Z" }, + { url = "https://files.pythonhosted.org/packages/2a/d3/414d1f0a5f6f4fe5313c2b002c54e78a3332970feb3f5fed14237aa17064/msal-1.36.0-py3-none-any.whl", hash = "sha256:36ecac30e2ff4322d956029aabce3c82301c29f0acb1ad89b94edcabb0e58ec4", size = 121547, upload-time = "2026-04-09T10:20:32.336Z" }, ] [[package]] @@ -2205,63 +2470,63 @@ wheels = [ [[package]] name = "numpy" -version = "2.4.1" +version = "2.4.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/62/ae72ff66c0f1fd959925b4c11f8c2dea61f47f6acaea75a08512cdfe3fed/numpy-2.4.1.tar.gz", hash = "sha256:a1ceafc5042451a858231588a104093474c6a5c57dcc724841f5c888d237d690", size = 20721320, upload-time = "2026-01-10T06:44:59.619Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/9f/b8cef5bffa569759033adda9481211426f12f53299629b410340795c2514/numpy-2.4.4.tar.gz", hash = "sha256:2d390634c5182175533585cc89f3608a4682ccb173cc9bb940b2881c8d6f8fa0", size = 20731587, upload-time = "2026-03-29T13:22:01.298Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/7f/ec53e32bf10c813604edf07a3682616bd931d026fcde7b6d13195dfb684a/numpy-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d3703409aac693fa82c0aee023a1ae06a6e9d065dba10f5e8e80f642f1e9d0a2", size = 16656888, upload-time = "2026-01-10T06:42:40.913Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e0/1f9585d7dae8f14864e948fd7fa86c6cb72dee2676ca2748e63b1c5acfe0/numpy-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7211b95ca365519d3596a1d8688a95874cc94219d417504d9ecb2df99fa7bfa8", size = 12373956, upload-time = "2026-01-10T06:42:43.091Z" }, - { url = "https://files.pythonhosted.org/packages/8e/43/9762e88909ff2326f5e7536fa8cb3c49fb03a7d92705f23e6e7f553d9cb3/numpy-2.4.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5adf01965456a664fc727ed69cc71848f28d063217c63e1a0e200a118d5eec9a", size = 5202567, upload-time = "2026-01-10T06:42:45.107Z" }, - { url = "https://files.pythonhosted.org/packages/4b/ee/34b7930eb61e79feb4478800a4b95b46566969d837546aa7c034c742ef98/numpy-2.4.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:26f0bcd9c79a00e339565b303badc74d3ea2bd6d52191eeca5f95936cad107d0", size = 6549459, upload-time = "2026-01-10T06:42:48.152Z" }, - { url = "https://files.pythonhosted.org/packages/79/e3/5f115fae982565771be994867c89bcd8d7208dbfe9469185497d70de5ddf/numpy-2.4.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0093e85df2960d7e4049664b26afc58b03236e967fb942354deef3208857a04c", size = 14404859, upload-time = "2026-01-10T06:42:49.947Z" }, - { url = "https://files.pythonhosted.org/packages/d9/7d/9c8a781c88933725445a859cac5d01b5871588a15969ee6aeb618ba99eee/numpy-2.4.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad270f438cbdd402c364980317fb6b117d9ec5e226fff5b4148dd9aa9fc6e02", size = 16371419, upload-time = "2026-01-10T06:42:52.409Z" }, - { url = "https://files.pythonhosted.org/packages/a6/d2/8aa084818554543f17cf4162c42f162acbd3bb42688aefdba6628a859f77/numpy-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:297c72b1b98100c2e8f873d5d35fb551fce7040ade83d67dd51d38c8d42a2162", size = 16182131, upload-time = "2026-01-10T06:42:54.694Z" }, - { url = 
"https://files.pythonhosted.org/packages/60/db/0425216684297c58a8df35f3284ef56ec4a043e6d283f8a59c53562caf1b/numpy-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf6470d91d34bf669f61d515499859fa7a4c2f7c36434afb70e82df7217933f9", size = 18295342, upload-time = "2026-01-10T06:42:56.991Z" }, - { url = "https://files.pythonhosted.org/packages/31/4c/14cb9d86240bd8c386c881bafbe43f001284b7cce3bc01623ac9475da163/numpy-2.4.1-cp312-cp312-win32.whl", hash = "sha256:b6bcf39112e956594b3331316d90c90c90fb961e39696bda97b89462f5f3943f", size = 5959015, upload-time = "2026-01-10T06:42:59.631Z" }, - { url = "https://files.pythonhosted.org/packages/51/cf/52a703dbeb0c65807540d29699fef5fda073434ff61846a564d5c296420f/numpy-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:e1a27bb1b2dee45a2a53f5ca6ff2d1a7f135287883a1689e930d44d1ff296c87", size = 12310730, upload-time = "2026-01-10T06:43:01.627Z" }, - { url = "https://files.pythonhosted.org/packages/69/80/a828b2d0ade5e74a9fe0f4e0a17c30fdc26232ad2bc8c9f8b3197cf7cf18/numpy-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:0e6e8f9d9ecf95399982019c01223dc130542960a12edfa8edd1122dfa66a8a8", size = 10312166, upload-time = "2026-01-10T06:43:03.673Z" }, - { url = "https://files.pythonhosted.org/packages/04/68/732d4b7811c00775f3bd522a21e8dd5a23f77eb11acdeb663e4a4ebf0ef4/numpy-2.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d797454e37570cfd61143b73b8debd623c3c0952959adb817dd310a483d58a1b", size = 16652495, upload-time = "2026-01-10T06:43:06.283Z" }, - { url = "https://files.pythonhosted.org/packages/20/ca/857722353421a27f1465652b2c66813eeeccea9d76d5f7b74b99f298e60e/numpy-2.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82c55962006156aeef1629b953fd359064aa47e4d82cfc8e67f0918f7da3344f", size = 12368657, upload-time = "2026-01-10T06:43:09.094Z" }, - { url = "https://files.pythonhosted.org/packages/81/0d/2377c917513449cc6240031a79d30eb9a163d32a91e79e0da47c43f2c0c8/numpy-2.4.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:71abbea030f2cfc3092a0ff9f8c8fdefdc5e0bf7d9d9c99663538bb0ecdac0b9", size = 5197256, upload-time = "2026-01-10T06:43:13.634Z" }, - { url = "https://files.pythonhosted.org/packages/17/39/569452228de3f5de9064ac75137082c6214be1f5c532016549a7923ab4b5/numpy-2.4.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5b55aa56165b17aaf15520beb9cbd33c9039810e0d9643dd4379e44294c7303e", size = 6545212, upload-time = "2026-01-10T06:43:15.661Z" }, - { url = "https://files.pythonhosted.org/packages/8c/a4/77333f4d1e4dac4395385482557aeecf4826e6ff517e32ca48e1dafbe42a/numpy-2.4.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0faba4a331195bfa96f93dd9dfaa10b2c7aa8cda3a02b7fd635e588fe821bf5", size = 14402871, upload-time = "2026-01-10T06:43:17.324Z" }, - { url = "https://files.pythonhosted.org/packages/ba/87/d341e519956273b39d8d47969dd1eaa1af740615394fe67d06f1efa68773/numpy-2.4.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e3087f53e2b4428766b54932644d148613c5a595150533ae7f00dab2f319a8", size = 16359305, upload-time = "2026-01-10T06:43:19.376Z" }, - { url = "https://files.pythonhosted.org/packages/32/91/789132c6666288eaa20ae8066bb99eba1939362e8f1a534949a215246e97/numpy-2.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:49e792ec351315e16da54b543db06ca8a86985ab682602d90c60ef4ff4db2a9c", size = 16181909, upload-time = "2026-01-10T06:43:21.808Z" }, - { url = 
"https://files.pythonhosted.org/packages/cf/b8/090b8bd27b82a844bb22ff8fdf7935cb1980b48d6e439ae116f53cdc2143/numpy-2.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79e9e06c4c2379db47f3f6fc7a8652e7498251789bf8ff5bd43bf478ef314ca2", size = 18284380, upload-time = "2026-01-10T06:43:23.957Z" }, - { url = "https://files.pythonhosted.org/packages/67/78/722b62bd31842ff029412271556a1a27a98f45359dea78b1548a3a9996aa/numpy-2.4.1-cp313-cp313-win32.whl", hash = "sha256:3d1a100e48cb266090a031397863ff8a30050ceefd798f686ff92c67a486753d", size = 5957089, upload-time = "2026-01-10T06:43:27.535Z" }, - { url = "https://files.pythonhosted.org/packages/da/a6/cf32198b0b6e18d4fbfa9a21a992a7fca535b9bb2b0cdd217d4a3445b5ca/numpy-2.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:92a0e65272fd60bfa0d9278e0484c2f52fe03b97aedc02b357f33fe752c52ffb", size = 12307230, upload-time = "2026-01-10T06:43:29.298Z" }, - { url = "https://files.pythonhosted.org/packages/44/6c/534d692bfb7d0afe30611320c5fb713659dcb5104d7cc182aff2aea092f5/numpy-2.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:20d4649c773f66cc2fc36f663e091f57c3b7655f936a4c681b4250855d1da8f5", size = 10313125, upload-time = "2026-01-10T06:43:31.782Z" }, - { url = "https://files.pythonhosted.org/packages/da/a1/354583ac5c4caa566de6ddfbc42744409b515039e085fab6e0ff942e0df5/numpy-2.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f93bc6892fe7b0663e5ffa83b61aab510aacffd58c16e012bb9352d489d90cb7", size = 12496156, upload-time = "2026-01-10T06:43:34.237Z" }, - { url = "https://files.pythonhosted.org/packages/51/b0/42807c6e8cce58c00127b1dc24d365305189991f2a7917aa694a109c8d7d/numpy-2.4.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:178de8f87948163d98a4c9ab5bee4ce6519ca918926ec8df195af582de28544d", size = 5324663, upload-time = "2026-01-10T06:43:36.211Z" }, - { url = "https://files.pythonhosted.org/packages/fe/55/7a621694010d92375ed82f312b2f28017694ed784775269115323e37f5e2/numpy-2.4.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:98b35775e03ab7f868908b524fc0a84d38932d8daf7b7e1c3c3a1b6c7a2c9f15", size = 6645224, upload-time = "2026-01-10T06:43:37.884Z" }, - { url = "https://files.pythonhosted.org/packages/50/96/9fa8635ed9d7c847d87e30c834f7109fac5e88549d79ef3324ab5c20919f/numpy-2.4.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:941c2a93313d030f219f3a71fd3d91a728b82979a5e8034eb2e60d394a2b83f9", size = 14462352, upload-time = "2026-01-10T06:43:39.479Z" }, - { url = "https://files.pythonhosted.org/packages/03/d1/8cf62d8bb2062da4fb82dd5d49e47c923f9c0738032f054e0a75342faba7/numpy-2.4.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:529050522e983e00a6c1c6b67411083630de8b57f65e853d7b03d9281b8694d2", size = 16407279, upload-time = "2026-01-10T06:43:41.93Z" }, - { url = "https://files.pythonhosted.org/packages/86/1c/95c86e17c6b0b31ce6ef219da00f71113b220bcb14938c8d9a05cee0ff53/numpy-2.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2302dc0224c1cbc49bb94f7064f3f923a971bfae45c33870dcbff63a2a550505", size = 16248316, upload-time = "2026-01-10T06:43:44.121Z" }, - { url = "https://files.pythonhosted.org/packages/30/b4/e7f5ff8697274c9d0fa82398b6a372a27e5cef069b37df6355ccb1f1db1a/numpy-2.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9171a42fcad32dcf3fa86f0a4faa5e9f8facefdb276f54b8b390d90447cff4e2", size = 18329884, upload-time = "2026-01-10T06:43:46.613Z" }, - { url = 
"https://files.pythonhosted.org/packages/37/a4/b073f3e9d77f9aec8debe8ca7f9f6a09e888ad1ba7488f0c3b36a94c03ac/numpy-2.4.1-cp313-cp313t-win32.whl", hash = "sha256:382ad67d99ef49024f11d1ce5dcb5ad8432446e4246a4b014418ba3a1175a1f4", size = 6081138, upload-time = "2026-01-10T06:43:48.854Z" }, - { url = "https://files.pythonhosted.org/packages/16/16/af42337b53844e67752a092481ab869c0523bc95c4e5c98e4dac4e9581ac/numpy-2.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:62fea415f83ad8fdb6c20840578e5fbaf5ddd65e0ec6c3c47eda0f69da172510", size = 12447478, upload-time = "2026-01-10T06:43:50.476Z" }, - { url = "https://files.pythonhosted.org/packages/6c/f8/fa85b2eac68ec631d0b631abc448552cb17d39afd17ec53dcbcc3537681a/numpy-2.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:a7870e8c5fc11aef57d6fea4b4085e537a3a60ad2cdd14322ed531fdca68d261", size = 10382981, upload-time = "2026-01-10T06:43:52.575Z" }, - { url = "https://files.pythonhosted.org/packages/1b/a7/ef08d25698e0e4b4efbad8d55251d20fe2a15f6d9aa7c9b30cd03c165e6f/numpy-2.4.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3869ea1ee1a1edc16c29bbe3a2f2a4e515cc3a44d43903ad41e0cacdbaf733dc", size = 16652046, upload-time = "2026-01-10T06:43:54.797Z" }, - { url = "https://files.pythonhosted.org/packages/8f/39/e378b3e3ca13477e5ac70293ec027c438d1927f18637e396fe90b1addd72/numpy-2.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e867df947d427cdd7a60e3e271729090b0f0df80f5f10ab7dd436f40811699c3", size = 12378858, upload-time = "2026-01-10T06:43:57.099Z" }, - { url = "https://files.pythonhosted.org/packages/c3/74/7ec6154f0006910ed1fdbb7591cf4432307033102b8a22041599935f8969/numpy-2.4.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:e3bd2cb07841166420d2fa7146c96ce00cb3410664cbc1a6be028e456c4ee220", size = 5207417, upload-time = "2026-01-10T06:43:59.037Z" }, - { url = "https://files.pythonhosted.org/packages/f7/b7/053ac11820d84e42f8feea5cb81cc4fcd1091499b45b1ed8c7415b1bf831/numpy-2.4.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:f0a90aba7d521e6954670550e561a4cb925713bd944445dbe9e729b71f6cabee", size = 6542643, upload-time = "2026-01-10T06:44:01.852Z" }, - { url = "https://files.pythonhosted.org/packages/c0/c4/2e7908915c0e32ca636b92e4e4a3bdec4cb1e7eb0f8aedf1ed3c68a0d8cd/numpy-2.4.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d558123217a83b2d1ba316b986e9248a1ed1971ad495963d555ccd75dcb1556", size = 14418963, upload-time = "2026-01-10T06:44:04.047Z" }, - { url = "https://files.pythonhosted.org/packages/eb/c0/3ed5083d94e7ffd7c404e54619c088e11f2e1939a9544f5397f4adb1b8ba/numpy-2.4.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f44de05659b67d20499cbc96d49f2650769afcb398b79b324bb6e297bfe3844", size = 16363811, upload-time = "2026-01-10T06:44:06.207Z" }, - { url = "https://files.pythonhosted.org/packages/0e/68/42b66f1852bf525050a67315a4fb94586ab7e9eaa541b1bef530fab0c5dd/numpy-2.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:69e7419c9012c4aaf695109564e3387f1259f001b4326dfa55907b098af082d3", size = 16197643, upload-time = "2026-01-10T06:44:08.33Z" }, - { url = "https://files.pythonhosted.org/packages/d2/40/e8714fc933d85f82c6bfc7b998a0649ad9769a32f3494ba86598aaf18a48/numpy-2.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2ffd257026eb1b34352e749d7cc1678b5eeec3e329ad8c9965a797e08ccba205", size = 18289601, upload-time = "2026-01-10T06:44:10.841Z" }, - { url = 
"https://files.pythonhosted.org/packages/80/9a/0d44b468cad50315127e884802351723daca7cf1c98d102929468c81d439/numpy-2.4.1-cp314-cp314-win32.whl", hash = "sha256:727c6c3275ddefa0dc078524a85e064c057b4f4e71ca5ca29a19163c607be745", size = 6005722, upload-time = "2026-01-10T06:44:13.332Z" }, - { url = "https://files.pythonhosted.org/packages/7e/bb/c6513edcce5a831810e2dddc0d3452ce84d208af92405a0c2e58fd8e7881/numpy-2.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:7d5d7999df434a038d75a748275cd6c0094b0ecdb0837342b332a82defc4dc4d", size = 12438590, upload-time = "2026-01-10T06:44:15.006Z" }, - { url = "https://files.pythonhosted.org/packages/e9/da/a598d5cb260780cf4d255102deba35c1d072dc028c4547832f45dd3323a8/numpy-2.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:ce9ce141a505053b3c7bce3216071f3bf5c182b8b28930f14cd24d43932cd2df", size = 10596180, upload-time = "2026-01-10T06:44:17.386Z" }, - { url = "https://files.pythonhosted.org/packages/de/bc/ea3f2c96fcb382311827231f911723aeff596364eb6e1b6d1d91128aa29b/numpy-2.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:4e53170557d37ae404bf8d542ca5b7c629d6efa1117dac6a83e394142ea0a43f", size = 12498774, upload-time = "2026-01-10T06:44:19.467Z" }, - { url = "https://files.pythonhosted.org/packages/aa/ab/ef9d939fe4a812648c7a712610b2ca6140b0853c5efea361301006c02ae5/numpy-2.4.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:a73044b752f5d34d4232f25f18160a1cc418ea4507f5f11e299d8ac36875f8a0", size = 5327274, upload-time = "2026-01-10T06:44:23.189Z" }, - { url = "https://files.pythonhosted.org/packages/bd/31/d381368e2a95c3b08b8cf7faac6004849e960f4a042d920337f71cef0cae/numpy-2.4.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:fb1461c99de4d040666ca0444057b06541e5642f800b71c56e6ea92d6a853a0c", size = 6648306, upload-time = "2026-01-10T06:44:25.012Z" }, - { url = "https://files.pythonhosted.org/packages/c8/e5/0989b44ade47430be6323d05c23207636d67d7362a1796ccbccac6773dd2/numpy-2.4.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:423797bdab2eeefbe608d7c1ec7b2b4fd3c58d51460f1ee26c7500a1d9c9ee93", size = 14464653, upload-time = "2026-01-10T06:44:26.706Z" }, - { url = "https://files.pythonhosted.org/packages/10/a7/cfbe475c35371cae1358e61f20c5f075badc18c4797ab4354140e1d283cf/numpy-2.4.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:52b5f61bdb323b566b528899cc7db2ba5d1015bda7ea811a8bcf3c89c331fa42", size = 16405144, upload-time = "2026-01-10T06:44:29.378Z" }, - { url = "https://files.pythonhosted.org/packages/f8/a3/0c63fe66b534888fa5177cc7cef061541064dbe2b4b60dcc60ffaf0d2157/numpy-2.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42d7dd5fa36d16d52a84f821eb96031836fd405ee6955dd732f2023724d0aa01", size = 16247425, upload-time = "2026-01-10T06:44:31.721Z" }, - { url = "https://files.pythonhosted.org/packages/6b/2b/55d980cfa2c93bd40ff4c290bf824d792bd41d2fe3487b07707559071760/numpy-2.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e7b6b5e28bbd47b7532698e5db2fe1db693d84b58c254e4389d99a27bb9b8f6b", size = 18330053, upload-time = "2026-01-10T06:44:34.617Z" }, - { url = "https://files.pythonhosted.org/packages/23/12/8b5fc6b9c487a09a7957188e0943c9ff08432c65e34567cabc1623b03a51/numpy-2.4.1-cp314-cp314t-win32.whl", hash = "sha256:5de60946f14ebe15e713a6f22850c2372fa72f4ff9a432ab44aa90edcadaa65a", size = 6152482, upload-time = "2026-01-10T06:44:36.798Z" }, - { url = 
"https://files.pythonhosted.org/packages/00/a5/9f8ca5856b8940492fc24fbe13c1bc34d65ddf4079097cf9e53164d094e1/numpy-2.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:8f085da926c0d491ffff3096f91078cc97ea67e7e6b65e490bc8dcda65663be2", size = 12627117, upload-time = "2026-01-10T06:44:38.828Z" }, - { url = "https://files.pythonhosted.org/packages/ad/0d/eca3d962f9eef265f01a8e0d20085c6dd1f443cbffc11b6dede81fd82356/numpy-2.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:6436cffb4f2bf26c974344439439c95e152c9a527013f26b3577be6c2ca64295", size = 10667121, upload-time = "2026-01-10T06:44:41.644Z" }, + { url = "https://files.pythonhosted.org/packages/28/05/32396bec30fb2263770ee910142f49c1476d08e8ad41abf8403806b520ce/numpy-2.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15716cfef24d3a9762e3acdf87e27f58dc823d1348f765bbea6bef8c639bfa1b", size = 16689272, upload-time = "2026-03-29T13:18:49.223Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f3/a983d28637bfcd763a9c7aafdb6d5c0ebf3d487d1e1459ffdb57e2f01117/numpy-2.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23cbfd4c17357c81021f21540da84ee282b9c8fba38a03b7b9d09ba6b951421e", size = 14699573, upload-time = "2026-03-29T13:18:52.629Z" }, + { url = "https://files.pythonhosted.org/packages/9b/fd/e5ecca1e78c05106d98028114f5c00d3eddb41207686b2b7de3e477b0e22/numpy-2.4.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b3b60bb7cba2c8c81837661c488637eee696f59a877788a396d33150c35d842", size = 5204782, upload-time = "2026-03-29T13:18:55.579Z" }, + { url = "https://files.pythonhosted.org/packages/de/2f/702a4594413c1a8632092beae8aba00f1d67947389369b3777aed783fdca/numpy-2.4.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e4a010c27ff6f210ff4c6ef34394cd61470d01014439b192ec22552ee867f2a8", size = 6552038, upload-time = "2026-03-29T13:18:57.769Z" }, + { url = "https://files.pythonhosted.org/packages/7f/37/eed308a8f56cba4d1fdf467a4fc67ef4ff4bf1c888f5fc980481890104b1/numpy-2.4.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9e75681b59ddaa5e659898085ae0eaea229d054f2ac0c7e563a62205a700121", size = 15670666, upload-time = "2026-03-29T13:19:00.341Z" }, + { url = "https://files.pythonhosted.org/packages/0a/0d/0e3ecece05b7a7e87ab9fb587855548da437a061326fff64a223b6dcb78a/numpy-2.4.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:81f4a14bee47aec54f883e0cad2d73986640c1590eb9bfaaba7ad17394481e6e", size = 16645480, upload-time = "2026-03-29T13:19:03.63Z" }, + { url = "https://files.pythonhosted.org/packages/34/49/f2312c154b82a286758ee2f1743336d50651f8b5195db18cdb63675ff649/numpy-2.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:62d6b0f03b694173f9fcb1fb317f7222fd0b0b103e784c6549f5e53a27718c44", size = 17020036, upload-time = "2026-03-29T13:19:07.428Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e9/736d17bd77f1b0ec4f9901aaec129c00d59f5d84d5e79bba540ef12c2330/numpy-2.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fbc356aae7adf9e6336d336b9c8111d390a05df88f1805573ebb0807bd06fd1d", size = 18368643, upload-time = "2026-03-29T13:19:10.775Z" }, + { url = "https://files.pythonhosted.org/packages/63/f6/d417977c5f519b17c8a5c3bc9e8304b0908b0e21136fe43bf628a1343914/numpy-2.4.4-cp312-cp312-win32.whl", hash = "sha256:0d35aea54ad1d420c812bfa0385c71cd7cc5bcf7c65fed95fc2cd02fe8c79827", size = 5961117, upload-time = "2026-03-29T13:19:13.464Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/5b/e1deebf88ff431b01b7406ca3583ab2bbb90972bbe1c568732e49c844f7e/numpy-2.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:b5f0362dc928a6ecd9db58868fca5e48485205e3855957bdedea308f8672ea4a", size = 12320584, upload-time = "2026-03-29T13:19:16.155Z" }, + { url = "https://files.pythonhosted.org/packages/58/89/e4e856ac82a68c3ed64486a544977d0e7bdd18b8da75b78a577ca31c4395/numpy-2.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:846300f379b5b12cc769334464656bc882e0735d27d9726568bc932fdc49d5ec", size = 10221450, upload-time = "2026-03-29T13:19:18.994Z" }, + { url = "https://files.pythonhosted.org/packages/14/1d/d0a583ce4fefcc3308806a749a536c201ed6b5ad6e1322e227ee4848979d/numpy-2.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:08f2e31ed5e6f04b118e49821397f12767934cfdd12a1ce86a058f91e004ee50", size = 16684933, upload-time = "2026-03-29T13:19:22.47Z" }, + { url = "https://files.pythonhosted.org/packages/c1/62/2b7a48fbb745d344742c0277f01286dead15f3f68e4f359fbfcf7b48f70f/numpy-2.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e823b8b6edc81e747526f70f71a9c0a07ac4e7ad13020aa736bb7c9d67196115", size = 14694532, upload-time = "2026-03-29T13:19:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/e5/87/499737bfba066b4a3bebff24a8f1c5b2dee410b209bc6668c9be692580f0/numpy-2.4.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4a19d9dba1a76618dd86b164d608566f393f8ec6ac7c44f0cc879011c45e65af", size = 5199661, upload-time = "2026-03-29T13:19:28.31Z" }, + { url = "https://files.pythonhosted.org/packages/cd/da/464d551604320d1491bc345efed99b4b7034143a85787aab78d5691d5a0e/numpy-2.4.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d2a8490669bfe99a233298348acc2d824d496dee0e66e31b66a6022c2ad74a5c", size = 6547539, upload-time = "2026-03-29T13:19:30.97Z" }, + { url = "https://files.pythonhosted.org/packages/7d/90/8d23e3b0dafd024bf31bdec225b3bb5c2dbfa6912f8a53b8659f21216cbf/numpy-2.4.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:45dbed2ab436a9e826e302fcdcbe9133f9b0006e5af7168afb8963a6520da103", size = 15668806, upload-time = "2026-03-29T13:19:33.887Z" }, + { url = "https://files.pythonhosted.org/packages/d1/73/a9d864e42a01896bb5974475438f16086be9ba1f0d19d0bb7a07427c4a8b/numpy-2.4.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c901b15172510173f5cb310eae652908340f8dede90fff9e3bf6c0d8dfd92f83", size = 16632682, upload-time = "2026-03-29T13:19:37.336Z" }, + { url = "https://files.pythonhosted.org/packages/34/fb/14570d65c3bde4e202a031210475ae9cde9b7686a2e7dc97ee67d2833b35/numpy-2.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:99d838547ace2c4aace6c4f76e879ddfe02bb58a80c1549928477862b7a6d6ed", size = 17019810, upload-time = "2026-03-29T13:19:40.963Z" }, + { url = "https://files.pythonhosted.org/packages/8a/77/2ba9d87081fd41f6d640c83f26fb7351e536b7ce6dd9061b6af5904e8e46/numpy-2.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0aec54fd785890ecca25a6003fd9a5aed47ad607bbac5cd64f836ad8666f4959", size = 18357394, upload-time = "2026-03-29T13:19:44.859Z" }, + { url = "https://files.pythonhosted.org/packages/a2/23/52666c9a41708b0853fa3b1a12c90da38c507a3074883823126d4e9d5b30/numpy-2.4.4-cp313-cp313-win32.whl", hash = "sha256:07077278157d02f65c43b1b26a3886bce886f95d20aabd11f87932750dfb14ed", size = 5959556, upload-time = "2026-03-29T13:19:47.661Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/fb/48649b4971cde70d817cf97a2a2fdc0b4d8308569f1dd2f2611959d2e0cf/numpy-2.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:5c70f1cc1c4efbe316a572e2d8b9b9cc44e89b95f79ca3331553fbb63716e2bf", size = 12317311, upload-time = "2026-03-29T13:19:50.67Z" }, + { url = "https://files.pythonhosted.org/packages/ba/d8/11490cddd564eb4de97b4579ef6bfe6a736cc07e94c1598590ae25415e01/numpy-2.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:ef4059d6e5152fa1a39f888e344c73fdc926e1b2dd58c771d67b0acfbf2aa67d", size = 10222060, upload-time = "2026-03-29T13:19:54.229Z" }, + { url = "https://files.pythonhosted.org/packages/99/5d/dab4339177a905aad3e2221c915b35202f1ec30d750dd2e5e9d9a72b804b/numpy-2.4.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4bbc7f303d125971f60ec0aaad5e12c62d0d2c925f0ab1273debd0e4ba37aba5", size = 14822302, upload-time = "2026-03-29T13:19:57.585Z" }, + { url = "https://files.pythonhosted.org/packages/eb/e4/0564a65e7d3d97562ed6f9b0fd0fb0a6f559ee444092f105938b50043876/numpy-2.4.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:4d6d57903571f86180eb98f8f0c839fa9ebbfb031356d87f1361be91e433f5b7", size = 5327407, upload-time = "2026-03-29T13:20:00.601Z" }, + { url = "https://files.pythonhosted.org/packages/29/8d/35a3a6ce5ad371afa58b4700f1c820f8f279948cca32524e0a695b0ded83/numpy-2.4.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:4636de7fd195197b7535f231b5de9e4b36d2c440b6e566d2e4e4746e6af0ca93", size = 6647631, upload-time = "2026-03-29T13:20:02.855Z" }, + { url = "https://files.pythonhosted.org/packages/f4/da/477731acbd5a58a946c736edfdabb2ac5b34c3d08d1ba1a7b437fa0884df/numpy-2.4.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ad2e2ef14e0b04e544ea2fa0a36463f847f113d314aa02e5b402fdf910ef309e", size = 15727691, upload-time = "2026-03-29T13:20:06.004Z" }, + { url = "https://files.pythonhosted.org/packages/e6/db/338535d9b152beabeb511579598418ba0212ce77cf9718edd70262cc4370/numpy-2.4.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a285b3b96f951841799528cd1f4f01cd70e7e0204b4abebac9463eecfcf2a40", size = 16681241, upload-time = "2026-03-29T13:20:09.417Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a9/ad248e8f58beb7a0219b413c9c7d8151c5d285f7f946c3e26695bdbbe2df/numpy-2.4.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f8474c4241bc18b750be2abea9d7a9ec84f46ef861dbacf86a4f6e043401f79e", size = 17085767, upload-time = "2026-03-29T13:20:13.126Z" }, + { url = "https://files.pythonhosted.org/packages/b5/1a/3b88ccd3694681356f70da841630e4725a7264d6a885c8d442a697e1146b/numpy-2.4.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4e874c976154687c1f71715b034739b45c7711bec81db01914770373d125e392", size = 18403169, upload-time = "2026-03-29T13:20:17.096Z" }, + { url = "https://files.pythonhosted.org/packages/c2/c9/fcfd5d0639222c6eac7f304829b04892ef51c96a75d479214d77e3ce6e33/numpy-2.4.4-cp313-cp313t-win32.whl", hash = "sha256:9c585a1790d5436a5374bac930dad6ed244c046ed91b2b2a3634eb2971d21008", size = 6083477, upload-time = "2026-03-29T13:20:20.195Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e3/3938a61d1c538aaec8ed6fd6323f57b0c2d2d2219512434c5c878db76553/numpy-2.4.4-cp313-cp313t-win_amd64.whl", hash = "sha256:93e15038125dc1e5345d9b5b68aa7f996ec33b98118d18c6ca0d0b7d6198b7e8", size = 12457487, upload-time = "2026-03-29T13:20:22.946Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/6a/7e345032cc60501721ef94e0e30b60f6b0bd601f9174ebd36389a2b86d40/numpy-2.4.4-cp313-cp313t-win_arm64.whl", hash = "sha256:0dfd3f9d3adbe2920b68b5cd3d51444e13a10792ec7154cd0a2f6e74d4ab3233", size = 10292002, upload-time = "2026-03-29T13:20:25.909Z" }, + { url = "https://files.pythonhosted.org/packages/6e/06/c54062f85f673dd5c04cbe2f14c3acb8c8b95e3384869bb8cc9bff8cb9df/numpy-2.4.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f169b9a863d34f5d11b8698ead99febeaa17a13ca044961aa8e2662a6c7766a0", size = 16684353, upload-time = "2026-03-29T13:20:29.504Z" }, + { url = "https://files.pythonhosted.org/packages/4c/39/8a320264a84404c74cc7e79715de85d6130fa07a0898f67fb5cd5bd79908/numpy-2.4.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2483e4584a1cb3092da4470b38866634bafb223cbcd551ee047633fd2584599a", size = 14704914, upload-time = "2026-03-29T13:20:33.547Z" }, + { url = "https://files.pythonhosted.org/packages/91/fb/287076b2614e1d1044235f50f03748f31fa287e3dbe6abeb35cdfa351eca/numpy-2.4.4-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:2d19e6e2095506d1736b7d80595e0f252d76b89f5e715c35e06e937679ea7d7a", size = 5210005, upload-time = "2026-03-29T13:20:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/63/eb/fcc338595309910de6ecabfcef2419a9ce24399680bfb149421fa2df1280/numpy-2.4.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:6a246d5914aa1c820c9443ddcee9c02bec3e203b0c080349533fae17727dfd1b", size = 6544974, upload-time = "2026-03-29T13:20:39.014Z" }, + { url = "https://files.pythonhosted.org/packages/44/5d/e7e9044032a716cdfaa3fba27a8e874bf1c5f1912a1ddd4ed071bf8a14a6/numpy-2.4.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:989824e9faf85f96ec9c7761cd8d29c531ad857bfa1daa930cba85baaecf1a9a", size = 15684591, upload-time = "2026-03-29T13:20:42.146Z" }, + { url = "https://files.pythonhosted.org/packages/98/7c/21252050676612625449b4807d6b695b9ce8a7c9e1c197ee6216c8a65c7c/numpy-2.4.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:27a8d92cd10f1382a67d7cf4db7ce18341b66438bdd9f691d7b0e48d104c2a9d", size = 16637700, upload-time = "2026-03-29T13:20:46.204Z" }, + { url = "https://files.pythonhosted.org/packages/b1/29/56d2bbef9465db24ef25393383d761a1af4f446a1df9b8cded4fe3a5a5d7/numpy-2.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e44319a2953c738205bf3354537979eaa3998ed673395b964c1176083dd46252", size = 17035781, upload-time = "2026-03-29T13:20:50.242Z" }, + { url = "https://files.pythonhosted.org/packages/e3/2b/a35a6d7589d21f44cea7d0a98de5ddcbb3d421b2622a5c96b1edf18707c3/numpy-2.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e892aff75639bbef0d2a2cfd55535510df26ff92f63c92cd84ef8d4ba5a5557f", size = 18362959, upload-time = "2026-03-29T13:20:54.019Z" }, + { url = "https://files.pythonhosted.org/packages/64/c9/d52ec581f2390e0f5f85cbfd80fb83d965fc15e9f0e1aec2195faa142cde/numpy-2.4.4-cp314-cp314-win32.whl", hash = "sha256:1378871da56ca8943c2ba674530924bb8ca40cd228358a3b5f302ad60cf875fc", size = 6008768, upload-time = "2026-03-29T13:20:56.912Z" }, + { url = "https://files.pythonhosted.org/packages/fa/22/4cc31a62a6c7b74a8730e31a4274c5dc80e005751e277a2ce38e675e4923/numpy-2.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:715d1c092715954784bc79e1174fc2a90093dc4dc84ea15eb14dad8abdcdeb74", size = 12449181, upload-time = "2026-03-29T13:20:59.548Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/2e/14cda6f4d8e396c612d1bf97f22958e92148801d7e4f110cabebdc0eef4b/numpy-2.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:2c194dd721e54ecad9ad387c1d35e63dce5c4450c6dc7dd5611283dda239aabb", size = 10496035, upload-time = "2026-03-29T13:21:02.524Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e8/8fed8c8d848d7ecea092dc3469643f9d10bc3a134a815a3b033da1d2039b/numpy-2.4.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2aa0613a5177c264ff5921051a5719d20095ea586ca88cc802c5c218d1c67d3e", size = 14824958, upload-time = "2026-03-29T13:21:05.671Z" }, + { url = "https://files.pythonhosted.org/packages/05/1a/d8007a5138c179c2bf33ef44503e83d70434d2642877ee8fbb230e7c0548/numpy-2.4.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:42c16925aa5a02362f986765f9ebabf20de75cdefdca827d14315c568dcab113", size = 5330020, upload-time = "2026-03-29T13:21:08.635Z" }, + { url = "https://files.pythonhosted.org/packages/99/64/ffb99ac6ae93faf117bcbd5c7ba48a7f45364a33e8e458545d3633615dda/numpy-2.4.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:874f200b2a981c647340f841730fc3a2b54c9d940566a3c4149099591e2c4c3d", size = 6650758, upload-time = "2026-03-29T13:21:10.949Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6e/795cc078b78a384052e73b2f6281ff7a700e9bf53bcce2ee579d4f6dd879/numpy-2.4.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9b39d38a9bd2ae1becd7eac1303d031c5c110ad31f2b319c6e7d98b135c934d", size = 15729948, upload-time = "2026-03-29T13:21:14.047Z" }, + { url = "https://files.pythonhosted.org/packages/5f/86/2acbda8cc2af5f3d7bfc791192863b9e3e19674da7b5e533fded124d1299/numpy-2.4.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b268594bccac7d7cf5844c7732e3f20c50921d94e36d7ec9b79e9857694b1b2f", size = 16679325, upload-time = "2026-03-29T13:21:17.561Z" }, + { url = "https://files.pythonhosted.org/packages/bc/59/cafd83018f4aa55e0ac6fa92aa066c0a1877b77a615ceff1711c260ffae8/numpy-2.4.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ac6b31e35612a26483e20750126d30d0941f949426974cace8e6b5c58a3657b0", size = 17084883, upload-time = "2026-03-29T13:21:21.106Z" }, + { url = "https://files.pythonhosted.org/packages/f0/85/a42548db84e65ece46ab2caea3d3f78b416a47af387fcbb47ec28e660dc2/numpy-2.4.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8e3ed142f2728df44263aaf5fb1f5b0b99f4070c553a0d7f033be65338329150", size = 18403474, upload-time = "2026-03-29T13:21:24.828Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ad/483d9e262f4b831000062e5d8a45e342166ec8aaa1195264982bca267e62/numpy-2.4.4-cp314-cp314t-win32.whl", hash = "sha256:dddbbd259598d7240b18c9d87c56a9d2fb3b02fe266f49a7c101532e78c1d871", size = 6155500, upload-time = "2026-03-29T13:21:28.205Z" }, + { url = "https://files.pythonhosted.org/packages/c7/03/2fc4e14c7bd4ff2964b74ba90ecb8552540b6315f201df70f137faa5c589/numpy-2.4.4-cp314-cp314t-win_amd64.whl", hash = "sha256:a7164afb23be6e37ad90b2f10426149fd75aee07ca55653d2aa41e66c4ef697e", size = 12637755, upload-time = "2026-03-29T13:21:31.107Z" }, + { url = "https://files.pythonhosted.org/packages/58/78/548fb8e07b1a341746bfbecb32f2c268470f45fa028aacdbd10d9bc73aab/numpy-2.4.4-cp314-cp314t-win_arm64.whl", hash = "sha256:ba203255017337d39f89bdd58417f03c4426f12beed0440cfd933cb15f8669c7", size = 10566643, upload-time = "2026-03-29T13:21:34.339Z" }, ] [[package]] @@ -2323,7 +2588,7 @@ wheels = [ [[package]] name = "openai" -version = "2.15.0" +version = "2.32.0" source = { 
registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -2335,9 +2600,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/94/f4/4690ecb5d70023ce6bfcfeabfe717020f654bde59a775058ec6ac4692463/openai-2.15.0.tar.gz", hash = "sha256:42eb8cbb407d84770633f31bf727d4ffb4138711c670565a41663d9439174fba", size = 627383, upload-time = "2026-01-09T22:10:08.603Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/59/bdcc6b759b8c42dd73afaf5bf8f902c04b37987a5514dbc1c64dba390fef/openai-2.32.0.tar.gz", hash = "sha256:c54b27a9e4cb8d51f0dd94972ffd1a04437efeb259a9e60d8922b8bd26fe55e0", size = 693286, upload-time = "2026-04-15T22:28:19.434Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/df/c306f7375d42bafb379934c2df4c2fa3964656c8c782bac75ee10c102818/openai-2.15.0-py3-none-any.whl", hash = "sha256:6ae23b932cd7230f7244e52954daa6602716d6b9bf235401a107af731baea6c3", size = 1067879, upload-time = "2026-01-09T22:10:06.446Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c1/d6e64ccd0536bf616556f0cad2b6d94a8125f508d25cfd814b1d2db4e2f1/openai-2.32.0-py3-none-any.whl", hash = "sha256:4dcc9badeb4bf54ad0d187453742f290226d30150890b7890711bda4f32f192f", size = 1162570, upload-time = "2026-04-15T22:28:17.714Z" }, ] [[package]] @@ -2354,32 +2619,32 @@ wheels = [ [[package]] name = "opentelemetry-api" -version = "1.40.0" +version = "1.41.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "importlib-metadata" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2c/1d/4049a9e8698361cc1a1aa03a6c59e4fa4c71e0c0f94a30f988a6876a2ae6/opentelemetry_api-1.40.0.tar.gz", hash = "sha256:159be641c0b04d11e9ecd576906462773eb97ae1b657730f0ecf64d32071569f", size = 70851, upload-time = "2026-03-04T14:17:21.555Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/fc/b7564cbef36601aef0d6c9bc01f7badb64be8e862c2e1c3c5c3b43b53e4f/opentelemetry_api-1.41.1.tar.gz", hash = "sha256:0ad1814d73b875f84494387dae86ce0b12c68556331ce6ce8fe789197c949621", size = 71416, upload-time = "2026-04-24T13:15:38.262Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/bf/93795954016c522008da367da292adceed71cca6ee1717e1d64c83089099/opentelemetry_api-1.40.0-py3-none-any.whl", hash = "sha256:82dd69331ae74b06f6a874704be0cfaa49a1650e1537d4a813b86ecef7d0ecf9", size = 68676, upload-time = "2026-03-04T14:17:01.24Z" }, + { url = "https://files.pythonhosted.org/packages/29/59/3e7118ed140f76b0982ba4321bdaed1997a0473f9720de2d10788a577033/opentelemetry_api-1.41.1-py3-none-any.whl", hash = "sha256:a22df900e75c76dc08440710e51f52f1aa6b451b429298896023e60db5b3139f", size = 69007, upload-time = "2026-04-24T13:15:15.662Z" }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-common" -version = "1.40.0" +version = "1.41.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-proto" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/51/bc/1559d46557fe6eca0b46c88d4c2676285f1f3be2e8d06bb5d15fbffc814a/opentelemetry_exporter_otlp_proto_common-1.40.0.tar.gz", hash = "sha256:1cbee86a4064790b362a86601ee7934f368b81cd4cc2f2e163902a6e7818a0fa", size = 20416, upload-time = "2026-03-04T14:17:23.801Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/fa/f9e3bd3c4d692b3ce9a2880a167d1f79681a1bea11f00d5bf76adc03e6ea/opentelemetry_exporter_otlp_proto_common-1.41.1.tar.gz", hash = 
"sha256:0e253156ea9c36b0bd3d2440c5c9ba7dd1f3fb64ba7a08fc85fbac536b56e1fb", size = 20409, upload-time = "2026-04-24T13:15:40.924Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/ca/8f122055c97a932311a3f640273f084e738008933503d0c2563cd5d591fc/opentelemetry_exporter_otlp_proto_common-1.40.0-py3-none-any.whl", hash = "sha256:7081ff453835a82417bf38dccf122c827c3cbc94f2079b03bba02a3165f25149", size = 18369, upload-time = "2026-03-04T14:17:04.796Z" }, + { url = "https://files.pythonhosted.org/packages/29/48/bce76d3ea772b609757e9bc844e02ab408a6446609bf74fb562062ba6b71/opentelemetry_exporter_otlp_proto_common-1.41.1-py3-none-any.whl", hash = "sha256:10da74dad6a49344b9b7b21b6182e3060373a235fde1528616d5f01f92e66aa9", size = 18366, upload-time = "2026-04-24T13:15:18.917Z" }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-http" -version = "1.40.0" +version = "1.41.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "googleapis-common-protos" }, @@ -2390,200 +2655,210 @@ dependencies = [ { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2e/fa/73d50e2c15c56be4d000c98e24221d494674b0cc95524e2a8cb3856d95a4/opentelemetry_exporter_otlp_proto_http-1.40.0.tar.gz", hash = "sha256:db48f5e0f33217588bbc00274a31517ba830da576e59503507c839b38fa0869c", size = 17772, upload-time = "2026-03-04T14:17:25.324Z" } +sdist = { url = "https://files.pythonhosted.org/packages/33/5b/9d3c7f70cca10136ba82a81e738dee626c8e7fc61c6887ea9a58bf34c606/opentelemetry_exporter_otlp_proto_http-1.41.1.tar.gz", hash = "sha256:4747a9604c8550ab38c6fd6180e2fcb80de3267060bef2c306bad3cb443302bc", size = 24139, upload-time = "2026-04-24T13:15:42.977Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/3a/8865d6754e61c9fb170cdd530a124a53769ee5f740236064816eb0ca7301/opentelemetry_exporter_otlp_proto_http-1.40.0-py3-none-any.whl", hash = "sha256:a8d1dab28f504c5d96577d6509f80a8150e44e8f45f82cdbe0e34c99ab040069", size = 19960, upload-time = "2026-03-04T14:17:07.153Z" }, + { url = "https://files.pythonhosted.org/packages/ba/4d/ef07ff2fc630849f2080ae0ae73a61f67257905b7ac79066640bfa0c5739/opentelemetry_exporter_otlp_proto_http-1.41.1-py3-none-any.whl", hash = "sha256:1a21e8f49c7a946d935551e90947d6c3eb39236723c6624401da0f33d68edcb4", size = 22673, upload-time = "2026-04-24T13:15:21.313Z" }, ] [[package]] name = "opentelemetry-proto" -version = "1.40.0" +version = "1.41.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4c/77/dd38991db037fdfce45849491cb61de5ab000f49824a00230afb112a4392/opentelemetry_proto-1.40.0.tar.gz", hash = "sha256:03f639ca129ba513f5819810f5b1f42bcb371391405d99c168fe6937c62febcd", size = 45667, upload-time = "2026-03-04T14:17:31.194Z" } +sdist = { url = "https://files.pythonhosted.org/packages/99/e8/633c6d8a9c8840338b105907e55c32d3da1983abab5e52f899f72a82c3d1/opentelemetry_proto-1.41.1.tar.gz", hash = "sha256:4b9d2eb631237ea43b80e16c073af438554e32bc7e9e3f8ca4a9582f900020e5", size = 45670, upload-time = "2026-04-24T13:15:49.768Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b9/b2/189b2577dde745b15625b3214302605b1353436219d42b7912e77fa8dc24/opentelemetry_proto-1.40.0-py3-none-any.whl", hash = "sha256:266c4385d88923a23d63e353e9761af0f47a6ed0d486979777fe4de59dc9b25f", size = 72073, upload-time = "2026-03-04T14:17:16.673Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/1e/5cd77035e3e82070e2265a63a760f715aacd3cb16dddc7efee913f297fcc/opentelemetry_proto-1.41.1-py3-none-any.whl", hash = "sha256:0496713b804d127a4147e32849fbaf5683fac8ee98550e8e7679cd706c289720", size = 72076, upload-time = "2026-04-24T13:15:32.542Z" }, ] [[package]] name = "opentelemetry-sdk" -version = "1.40.0" +version = "1.41.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-semantic-conventions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/58/fd/3c3125b20ba18ce2155ba9ea74acb0ae5d25f8cd39cfd37455601b7955cc/opentelemetry_sdk-1.40.0.tar.gz", hash = "sha256:18e9f5ec20d859d268c7cb3c5198c8d105d073714db3de50b593b8c1345a48f2", size = 184252, upload-time = "2026-03-04T14:17:31.87Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/d0/54ee30dab82fb0acda23d144502771ff76ef8728459c83c3e89ef9fb1825/opentelemetry_sdk-1.41.1.tar.gz", hash = "sha256:724b615e1215b5aeacda0abb8a6a8922c9a1853068948bd0bd225a56d0c792e6", size = 230180, upload-time = "2026-04-24T13:15:50.991Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/c5/6a852903d8bfac758c6dc6e9a68b015d3c33f2f1be5e9591e0f4b69c7e0a/opentelemetry_sdk-1.40.0-py3-none-any.whl", hash = "sha256:787d2154a71f4b3d81f20524a8ce061b7db667d24e46753f32a7bc48f1c1f3f1", size = 141951, upload-time = "2026-03-04T14:17:17.961Z" }, + { url = "https://files.pythonhosted.org/packages/b4/e7/a1420b698aad018e1cf60fdbaaccbe49021fb415e2a0d81c242f4c518f54/opentelemetry_sdk-1.41.1-py3-none-any.whl", hash = "sha256:edee379c126c1bce952b0c812b48fe8ff35b30df0eecf17e98afa4d598b7d85d", size = 180213, upload-time = "2026-04-24T13:15:33.767Z" }, ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.61b0" +version = "0.62b1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/c0/4ae7973f3c2cfd2b6e321f1675626f0dab0a97027cc7a297474c9c8f3d04/opentelemetry_semantic_conventions-0.61b0.tar.gz", hash = "sha256:072f65473c5d7c6dc0355b27d6c9d1a679d63b6d4b4b16a9773062cb7e31192a", size = 145755, upload-time = "2026-03-04T14:17:32.664Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/de/911ac9e309052aca1b20b2d5549d3db45d1011e1a610e552c6ccdd1b64f8/opentelemetry_semantic_conventions-0.62b1.tar.gz", hash = "sha256:c5cc6e04a7f8c7cdd30be2ed81499fa4e75bfbd52c9cb70d40af1f9cd3619802", size = 145750, upload-time = "2026-04-24T13:15:52.236Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/37/cc6a55e448deaa9b27377d087da8615a3416d8ad523d5960b78dbeadd02a/opentelemetry_semantic_conventions-0.61b0-py3-none-any.whl", hash = "sha256:fa530a96be229795f8cef353739b618148b0fe2b4b3f005e60e262926c4d38e2", size = 231621, upload-time = "2026-03-04T14:17:19.33Z" }, + { url = "https://files.pythonhosted.org/packages/eb/a6/83dc2ab6fa397ee66fba04fe2e74bdf7be3b3870005359ceb7689103c058/opentelemetry_semantic_conventions-0.62b1-py3-none-any.whl", hash = "sha256:cf506938103d331fbb78eded0d9788095f7fd59016f2bda813c3324e5a74a93c", size = 231620, upload-time = "2026-04-24T13:15:35.454Z" }, ] [[package]] name = "orjson" -version = "3.11.6" +version = "3.11.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/a3/4e09c61a5f0c521cba0bb433639610ae037437669f1a4cbc93799e731d78/orjson-3.11.6.tar.gz", 
hash = "sha256:0a54c72259f35299fd033042367df781c2f66d10252955ca1efb7db309b954cb", size = 6175856, upload-time = "2026-01-29T15:13:07.942Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/1b/2024d06792d0779f9dbc51531b61c24f76c75b9f4ce05e6f3377a1814cea/orjson-3.11.8.tar.gz", hash = "sha256:96163d9cdc5a202703e9ad1b9ae757d5f0ca62f4fa0cc93d1f27b0e180cc404e", size = 5603832, upload-time = "2026-03-31T16:16:27.878Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/14/ba/759f2879f41910b7e5e0cdbd9cf82a4f017c527fb0e972e9869ca7fe4c8e/orjson-3.11.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6f03f30cd8953f75f2a439070c743c7336d10ee940da918d71c6f3556af3ddcf", size = 249988, upload-time = "2026-01-29T15:11:58.294Z" }, - { url = "https://files.pythonhosted.org/packages/f0/70/54cecb929e6c8b10104fcf580b0cc7dc551aa193e83787dd6f3daba28bb5/orjson-3.11.6-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:af44baae65ef386ad971469a8557a0673bb042b0b9fd4397becd9c2dfaa02588", size = 134445, upload-time = "2026-01-29T15:11:59.819Z" }, - { url = "https://files.pythonhosted.org/packages/f2/6f/ec0309154457b9ba1ad05f11faa4441f76037152f75e1ac577db3ce7ca96/orjson-3.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c310a48542094e4f7dbb6ac076880994986dda8ca9186a58c3cb70a3514d3231", size = 137708, upload-time = "2026-01-29T15:12:01.488Z" }, - { url = "https://files.pythonhosted.org/packages/20/52/3c71b80840f8bab9cb26417302707b7716b7d25f863f3a541bcfa232fe6e/orjson-3.11.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d8dfa7a5d387f15ecad94cb6b2d2d5f4aeea64efd8d526bfc03c9812d01e1cc0", size = 134798, upload-time = "2026-01-29T15:12:02.705Z" }, - { url = "https://files.pythonhosted.org/packages/30/51/b490a43b22ff736282360bd02e6bded455cf31dfc3224e01cd39f919bbd2/orjson-3.11.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba8daee3e999411b50f8b50dbb0a3071dd1845f3f9a1a0a6fa6de86d1689d84d", size = 140839, upload-time = "2026-01-29T15:12:03.956Z" }, - { url = "https://files.pythonhosted.org/packages/95/bc/4bcfe4280c1bc63c5291bb96f98298845b6355da2226d3400e17e7b51e53/orjson-3.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f89d104c974eafd7436d7a5fdbc57f7a1e776789959a2f4f1b2eab5c62a339f4", size = 144080, upload-time = "2026-01-29T15:12:05.151Z" }, - { url = "https://files.pythonhosted.org/packages/01/74/22970f9ead9ab1f1b5f8c227a6c3aa8d71cd2c5acd005868a1d44f2362fa/orjson-3.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2e2e2456788ca5ea75616c40da06fc885a7dc0389780e8a41bf7c5389ba257b", size = 142435, upload-time = "2026-01-29T15:12:06.641Z" }, - { url = "https://files.pythonhosted.org/packages/29/34/d564aff85847ab92c82ee43a7a203683566c2fca0723a5f50aebbe759603/orjson-3.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a42efebc45afabb1448001e90458c4020d5c64fbac8a8dc4045b777db76cb5a", size = 145631, upload-time = "2026-01-29T15:12:08.351Z" }, - { url = "https://files.pythonhosted.org/packages/e7/ef/016957a3890752c4aa2368326ea69fa53cdc1fdae0a94a542b6410dbdf52/orjson-3.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71b7cbef8471324966c3738c90ba38775563ef01b512feb5ad4805682188d1b9", size = 147058, upload-time = "2026-01-29T15:12:10.023Z" }, - { url = 
"https://files.pythonhosted.org/packages/56/cc/9a899c3972085645b3225569f91a30e221f441e5dc8126e6d060b971c252/orjson-3.11.6-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:f8515e5910f454fe9a8e13c2bb9dc4bae4c1836313e967e72eb8a4ad874f0248", size = 421161, upload-time = "2026-01-29T15:12:11.308Z" }, - { url = "https://files.pythonhosted.org/packages/21/a8/767d3fbd6d9b8fdee76974db40619399355fd49bf91a6dd2c4b6909ccf05/orjson-3.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:300360edf27c8c9bf7047345a94fddf3a8b8922df0ff69d71d854a170cb375cf", size = 155757, upload-time = "2026-01-29T15:12:12.776Z" }, - { url = "https://files.pythonhosted.org/packages/ad/0b/205cd69ac87e2272e13ef3f5f03a3d4657e317e38c1b08aaa2ef97060bbc/orjson-3.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:caaed4dad39e271adfadc106fab634d173b2bb23d9cf7e67bd645f879175ebfc", size = 147446, upload-time = "2026-01-29T15:12:14.166Z" }, - { url = "https://files.pythonhosted.org/packages/de/c5/dd9f22aa9f27c54c7d05cc32f4580c9ac9b6f13811eeb81d6c4c3f50d6b1/orjson-3.11.6-cp312-cp312-win32.whl", hash = "sha256:955368c11808c89793e847830e1b1007503a5923ddadc108547d3b77df761044", size = 139717, upload-time = "2026-01-29T15:12:15.7Z" }, - { url = "https://files.pythonhosted.org/packages/23/a1/e62fc50d904486970315a1654b8cfb5832eb46abb18cd5405118e7e1fc79/orjson-3.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:2c68de30131481150073d90a5d227a4a421982f42c025ecdfb66157f9579e06f", size = 136711, upload-time = "2026-01-29T15:12:17.055Z" }, - { url = "https://files.pythonhosted.org/packages/04/3d/b4fefad8bdf91e0fe212eb04975aeb36ea92997269d68857efcc7eb1dda3/orjson-3.11.6-cp312-cp312-win_arm64.whl", hash = "sha256:65dfa096f4e3a5e02834b681f539a87fbe85adc82001383c0db907557f666bfc", size = 135212, upload-time = "2026-01-29T15:12:18.3Z" }, - { url = "https://files.pythonhosted.org/packages/ae/45/d9c71c8c321277bc1ceebf599bc55ba826ae538b7c61f287e9a7e71bd589/orjson-3.11.6-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e4ae1670caabb598a88d385798692ce2a1b2f078971b3329cfb85253c6097f5b", size = 249828, upload-time = "2026-01-29T15:12:20.14Z" }, - { url = "https://files.pythonhosted.org/packages/ac/7e/4afcf4cfa9c2f93846d70eee9c53c3c0123286edcbeb530b7e9bd2aea1b2/orjson-3.11.6-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:2c6b81f47b13dac2caa5d20fbc953c75eb802543abf48403a4703ed3bff225f0", size = 134339, upload-time = "2026-01-29T15:12:22.01Z" }, - { url = "https://files.pythonhosted.org/packages/40/10/6d2b8a064c8d2411d3d0ea6ab43125fae70152aef6bea77bb50fa54d4097/orjson-3.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:647d6d034e463764e86670644bdcaf8e68b076e6e74783383b01085ae9ab334f", size = 137662, upload-time = "2026-01-29T15:12:23.307Z" }, - { url = "https://files.pythonhosted.org/packages/5a/50/5804ea7d586baf83ee88969eefda97a24f9a5bdba0727f73e16305175b26/orjson-3.11.6-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8523b9cc4ef174ae52414f7699e95ee657c16aa18b3c3c285d48d7966cce9081", size = 134626, upload-time = "2026-01-29T15:12:25.099Z" }, - { url = "https://files.pythonhosted.org/packages/9e/2e/f0492ed43e376722bb4afd648e06cc1e627fc7ec8ff55f6ee739277813ea/orjson-3.11.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:313dfd7184cde50c733fc0d5c8c0e2f09017b573afd11dc36bd7476b30b4cb17", size = 140873, upload-time = "2026-01-29T15:12:26.369Z" }, - { url = 
"https://files.pythonhosted.org/packages/10/15/6f874857463421794a303a39ac5494786ad46a4ab46d92bda6705d78c5aa/orjson-3.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:905ee036064ff1e1fd1fb800055ac477cdcb547a78c22c1bc2bbf8d5d1a6fb42", size = 144044, upload-time = "2026-01-29T15:12:28.082Z" }, - { url = "https://files.pythonhosted.org/packages/d2/c7/b7223a3a70f1d0cc2d86953825de45f33877ee1b124a91ca1f79aa6e643f/orjson-3.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce374cb98411356ba906914441fc993f271a7a666d838d8de0e0900dd4a4bc12", size = 142396, upload-time = "2026-01-29T15:12:30.529Z" }, - { url = "https://files.pythonhosted.org/packages/87/e3/aa1b6d3ad3cd80f10394134f73ae92a1d11fdbe974c34aa199cc18bb5fcf/orjson-3.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cded072b9f65fcfd188aead45efa5bd528ba552add619b3ad2a81f67400ec450", size = 145600, upload-time = "2026-01-29T15:12:31.848Z" }, - { url = "https://files.pythonhosted.org/packages/f6/cf/e4aac5a46cbd39d7e769ef8650efa851dfce22df1ba97ae2b33efe893b12/orjson-3.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ab85bdbc138e1f73a234db6bb2e4cc1f0fcec8f4bd2bd2430e957a01aadf746", size = 146967, upload-time = "2026-01-29T15:12:33.203Z" }, - { url = "https://files.pythonhosted.org/packages/0b/04/975b86a4bcf6cfeda47aad15956d52fbeda280811206e9967380fa9355c8/orjson-3.11.6-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:351b96b614e3c37a27b8ab048239ebc1e0be76cc17481a430d70a77fb95d3844", size = 421003, upload-time = "2026-01-29T15:12:35.097Z" }, - { url = "https://files.pythonhosted.org/packages/28/d1/0369d0baf40eea5ff2300cebfe209883b2473ab4aa4c4974c8bd5ee42bb2/orjson-3.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f9959c85576beae5cdcaaf39510b15105f1ee8b70d5dacd90152617f57be8c83", size = 155695, upload-time = "2026-01-29T15:12:36.589Z" }, - { url = "https://files.pythonhosted.org/packages/ab/1f/d10c6d6ae26ff1d7c3eea6fd048280ef2e796d4fb260c5424fd021f68ecf/orjson-3.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:75682d62b1b16b61a30716d7a2ec1f4c36195de4a1c61f6665aedd947b93a5d5", size = 147392, upload-time = "2026-01-29T15:12:37.876Z" }, - { url = "https://files.pythonhosted.org/packages/8d/43/7479921c174441a0aa5277c313732e20713c0969ac303be9f03d88d3db5d/orjson-3.11.6-cp313-cp313-win32.whl", hash = "sha256:40dc277999c2ef227dcc13072be879b4cfd325502daeb5c35ed768f706f2bf30", size = 139718, upload-time = "2026-01-29T15:12:39.274Z" }, - { url = "https://files.pythonhosted.org/packages/88/bc/9ffe7dfbf8454bc4e75bb8bf3a405ed9e0598df1d3535bb4adcd46be07d0/orjson-3.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:f0f6e9f8ff7905660bc3c8a54cd4a675aa98f7f175cf00a59815e2ff42c0d916", size = 136635, upload-time = "2026-01-29T15:12:40.593Z" }, - { url = "https://files.pythonhosted.org/packages/6f/7e/51fa90b451470447ea5023b20d83331ec741ae28d1e6d8ed547c24e7de14/orjson-3.11.6-cp313-cp313-win_arm64.whl", hash = "sha256:1608999478664de848e5900ce41f25c4ecdfc4beacbc632b6fd55e1a586e5d38", size = 135175, upload-time = "2026-01-29T15:12:41.997Z" }, - { url = "https://files.pythonhosted.org/packages/31/9f/46ca908abaeeec7560638ff20276ab327b980d73b3cc2f5b205b4a1c60b3/orjson-3.11.6-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6026db2692041d2a23fe2545606df591687787825ad5821971ef0974f2c47630", size = 249823, upload-time = "2026-01-29T15:12:43.332Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/78/ca478089818d18c9cd04f79c43f74ddd031b63c70fa2a946eb5e85414623/orjson-3.11.6-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:132b0ab2e20c73afa85cf142e547511feb3d2f5b7943468984658f3952b467d4", size = 134328, upload-time = "2026-01-29T15:12:45.171Z" }, - { url = "https://files.pythonhosted.org/packages/39/5e/cbb9d830ed4e47f4375ad8eef8e4fff1bf1328437732c3809054fc4e80be/orjson-3.11.6-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b376fb05f20a96ec117d47987dd3b39265c635725bda40661b4c5b73b77b5fde", size = 137651, upload-time = "2026-01-29T15:12:46.602Z" }, - { url = "https://files.pythonhosted.org/packages/7c/3a/35df6558c5bc3a65ce0961aefee7f8364e59af78749fc796ea255bfa0cf5/orjson-3.11.6-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:954dae4e080574672a1dfcf2a840eddef0f27bd89b0e94903dd0824e9c1db060", size = 134596, upload-time = "2026-01-29T15:12:47.95Z" }, - { url = "https://files.pythonhosted.org/packages/cd/8e/3d32dd7b7f26a19cc4512d6ed0ae3429567c71feef720fe699ff43c5bc9e/orjson-3.11.6-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe515bb89d59e1e4b48637a964f480b35c0a2676de24e65e55310f6016cca7ce", size = 140923, upload-time = "2026-01-29T15:12:49.333Z" }, - { url = "https://files.pythonhosted.org/packages/6c/9c/1efbf5c99b3304f25d6f0d493a8d1492ee98693637c10ce65d57be839d7b/orjson-3.11.6-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:380f9709c275917af28feb086813923251e11ee10687257cd7f1ea188bcd4485", size = 144068, upload-time = "2026-01-29T15:12:50.927Z" }, - { url = "https://files.pythonhosted.org/packages/82/83/0d19eeb5be797de217303bbb55dde58dba26f996ed905d301d98fd2d4637/orjson-3.11.6-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8173e0d3f6081e7034c51cf984036d02f6bab2a2126de5a759d79f8e5a140e7", size = 142493, upload-time = "2026-01-29T15:12:52.432Z" }, - { url = "https://files.pythonhosted.org/packages/32/a7/573fec3df4dc8fc259b7770dc6c0656f91adce6e19330c78d23f87945d1e/orjson-3.11.6-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dddf9ba706294906c56ef5150a958317b09aa3a8a48df1c52ccf22ec1907eac", size = 145616, upload-time = "2026-01-29T15:12:53.903Z" }, - { url = "https://files.pythonhosted.org/packages/c2/0e/23551b16f21690f7fd5122e3cf40fdca5d77052a434d0071990f97f5fe2f/orjson-3.11.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cbae5c34588dc79938dffb0b6fbe8c531f4dc8a6ad7f39759a9eb5d2da405ef2", size = 146951, upload-time = "2026-01-29T15:12:55.698Z" }, - { url = "https://files.pythonhosted.org/packages/b8/63/5e6c8f39805c39123a18e412434ea364349ee0012548d08aa586e2bd6aa9/orjson-3.11.6-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:f75c318640acbddc419733b57f8a07515e587a939d8f54363654041fd1f4e465", size = 421024, upload-time = "2026-01-29T15:12:57.434Z" }, - { url = "https://files.pythonhosted.org/packages/1d/4d/724975cf0087f6550bd01fd62203418afc0ea33fd099aed318c5bcc52df8/orjson-3.11.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:e0ab8d13aa2a3e98b4a43487c9205b2c92c38c054b4237777484d503357c8437", size = 155774, upload-time = "2026-01-29T15:12:59.397Z" }, - { url = "https://files.pythonhosted.org/packages/a8/a3/f4c4e3f46b55db29e0a5f20493b924fc791092d9a03ff2068c9fe6c1002f/orjson-3.11.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f884c7fb1020d44612bd7ac0db0babba0e2f78b68d9a650c7959bf99c783773f", size = 147393, upload-time = "2026-01-29T15:13:00.769Z" }, - { url 
= "https://files.pythonhosted.org/packages/ee/86/6f5529dd27230966171ee126cecb237ed08e9f05f6102bfaf63e5b32277d/orjson-3.11.6-cp314-cp314-win32.whl", hash = "sha256:8d1035d1b25732ec9f971e833a3e299d2b1a330236f75e6fd945ad982c76aaf3", size = 139760, upload-time = "2026-01-29T15:13:02.173Z" }, - { url = "https://files.pythonhosted.org/packages/d3/b5/91ae7037b2894a6b5002fb33f4fbccec98424a928469835c3837fbb22a9b/orjson-3.11.6-cp314-cp314-win_amd64.whl", hash = "sha256:931607a8865d21682bb72de54231655c86df1870502d2962dbfd12c82890d077", size = 136633, upload-time = "2026-01-29T15:13:04.267Z" }, - { url = "https://files.pythonhosted.org/packages/55/74/f473a3ec7a0a7ebc825ca8e3c86763f7d039f379860c81ba12dcdd456547/orjson-3.11.6-cp314-cp314-win_arm64.whl", hash = "sha256:fe71f6b283f4f1832204ab8235ce07adad145052614f77c876fcf0dac97bc06f", size = 135168, upload-time = "2026-01-29T15:13:05.932Z" }, + { url = "https://files.pythonhosted.org/packages/01/f6/8d58b32ab32d9215973a1688aebd098252ee8af1766c0e4e36e7831f0295/orjson-3.11.8-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1cd0b77e77c95758f8e1100139844e99f3ccc87e71e6fc8e1c027e55807c549f", size = 229233, upload-time = "2026-03-31T16:15:12.762Z" }, + { url = "https://files.pythonhosted.org/packages/a9/8b/2ffe35e71f6b92622e8ea4607bf33ecf7dfb51b3619dcfabfd36cbe2d0a5/orjson-3.11.8-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:6a3d159d5ffa0e3961f353c4b036540996bf8b9697ccc38261c0eac1fd3347a6", size = 128772, upload-time = "2026-03-31T16:15:14.237Z" }, + { url = "https://files.pythonhosted.org/packages/27/d2/1f8682ae50d5c6897a563cb96bc106da8c9cb5b7b6e81a52e4cc086679b9/orjson-3.11.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76070a76e9c5ae661e2d9848f216980d8d533e0f8143e6ed462807b242e3c5e8", size = 131946, upload-time = "2026-03-31T16:15:15.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/4b/5500f76f0eece84226e0689cb48dcde081104c2fa6e2483d17ca13685ffb/orjson-3.11.8-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54153d21520a71a4c82a0dbb4523e468941d549d221dc173de0f019678cf3813", size = 130368, upload-time = "2026-03-31T16:15:17.066Z" }, + { url = "https://files.pythonhosted.org/packages/da/4e/58b927e08fbe9840e6c920d9e299b051ea667463b1f39a56e668669f8508/orjson-3.11.8-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:469ac2125611b7c5741a0b3798cd9e5786cbad6345f9f400c77212be89563bec", size = 135540, upload-time = "2026-03-31T16:15:18.404Z" }, + { url = "https://files.pythonhosted.org/packages/56/7c/ba7cb871cba1bcd5cd02ee34f98d894c6cea96353ad87466e5aef2429c60/orjson-3.11.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14778ffd0f6896aa613951a7fbf4690229aa7a543cb2bfbe9f358e08aafa9546", size = 146877, upload-time = "2026-03-31T16:15:19.833Z" }, + { url = "https://files.pythonhosted.org/packages/0b/5d/eb9c25fc1386696c6a342cd361c306452c75e0b55e86ad602dd4827a7fd7/orjson-3.11.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea56a955056a6d6c550cf18b3348656a9d9a4f02e2d0c02cabf3c73f1055d506", size = 132837, upload-time = "2026-03-31T16:15:21.282Z" }, + { url = "https://files.pythonhosted.org/packages/37/87/5ddeb7fc1fbd9004aeccab08426f34c81a5b4c25c7061281862b015fce2b/orjson-3.11.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53a0f57e59a530d18a142f4d4ba6dfc708dc5fdedce45e98ff06b44930a2a48f", size = 133624, upload-time = "2026-03-31T16:15:22.641Z" }, 
+ { url = "https://files.pythonhosted.org/packages/22/09/90048793db94ee4b2fcec4ac8e5ddb077367637d6650be896b3494b79bb7/orjson-3.11.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b48e274f8824567d74e2158199e269597edf00823a1b12b63d48462bbf5123e", size = 141904, upload-time = "2026-03-31T16:15:24.435Z" }, + { url = "https://files.pythonhosted.org/packages/c0/cf/eb284847487821a5d415e54149a6449ba9bfc5872ce63ab7be41b8ec401c/orjson-3.11.8-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:3f262401086a3960586af06c054609365e98407151f5ea24a62893a40d80dbbb", size = 423742, upload-time = "2026-03-31T16:15:26.155Z" }, + { url = "https://files.pythonhosted.org/packages/44/09/e12423d327071c851c13e76936f144a96adacfc037394dec35ac3fc8d1e8/orjson-3.11.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8e8c6218b614badf8e229b697865df4301afa74b791b6c9ade01d19a9953a942", size = 147806, upload-time = "2026-03-31T16:15:27.909Z" }, + { url = "https://files.pythonhosted.org/packages/b3/6d/37c2589ba864e582ffe7611643314785c6afb1f83c701654ef05daa8fcc7/orjson-3.11.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:093d489fa039ddade2db541097dbb484999fcc65fc2b0ff9819141e2ab364f25", size = 136485, upload-time = "2026-03-31T16:15:29.749Z" }, + { url = "https://files.pythonhosted.org/packages/be/c9/135194a02ab76b04ed9a10f68624b7ebd238bbe55548878b11ff15a0f352/orjson-3.11.8-cp312-cp312-win32.whl", hash = "sha256:e0950ed1bcb9893f4293fd5c5a7ee10934fbf82c4101c70be360db23ce24b7d2", size = 131966, upload-time = "2026-03-31T16:15:31.687Z" }, + { url = "https://files.pythonhosted.org/packages/ed/9a/9796f8fbe3cf30ce9cb696748dbb535e5c87be4bf4fe2e9ca498ef1fa8cf/orjson-3.11.8-cp312-cp312-win_amd64.whl", hash = "sha256:3cf17c141617b88ced4536b2135c552490f07799f6ad565948ea07bef0dcb9a6", size = 127441, upload-time = "2026-03-31T16:15:33.333Z" }, + { url = "https://files.pythonhosted.org/packages/cc/47/5aaf54524a7a4a0dd09dd778f3fa65dd2108290615b652e23d944152bc8e/orjson-3.11.8-cp312-cp312-win_arm64.whl", hash = "sha256:48854463b0572cc87dac7d981aa72ed8bf6deedc0511853dc76b8bbd5482d36d", size = 127364, upload-time = "2026-03-31T16:15:34.748Z" }, + { url = "https://files.pythonhosted.org/packages/66/7f/95fba509bb2305fab0073558f1e8c3a2ec4b2afe58ed9fcb7d3b8beafe94/orjson-3.11.8-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:3f23426851d98478c8970da5991f84784a76682213cd50eb73a1da56b95239dc", size = 229180, upload-time = "2026-03-31T16:15:36.426Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9d/b237215c743ca073697d759b5503abd2cb8a0d7b9c9e21f524bcf176ab66/orjson-3.11.8-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:ebaed4cef74a045b83e23537b52ef19a367c7e3f536751e355a2a394f8648559", size = 128754, upload-time = "2026-03-31T16:15:38.049Z" }, + { url = "https://files.pythonhosted.org/packages/42/3d/27d65b6d11e63f133781425f132807aef793ed25075fec686fc8e46dd528/orjson-3.11.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97c8f5d3b62380b70c36ffacb2a356b7c6becec86099b177f73851ba095ef623", size = 131877, upload-time = "2026-03-31T16:15:39.484Z" }, + { url = "https://files.pythonhosted.org/packages/dd/cc/faee30cd8f00421999e40ef0eba7332e3a625ce91a58200a2f52c7fef235/orjson-3.11.8-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:436c4922968a619fb7fef1ccd4b8b3a76c13b67d607073914d675026e911a65c", size = 130361, upload-time = "2026-03-31T16:15:41.274Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/bb/a6c55896197f97b6d4b4e7c7fd77e7235517c34f5d6ad5aadd43c54c6d7c/orjson-3.11.8-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ab359aff0436d80bfe8a23b46b5fea69f1e18aaf1760a709b4787f1318b317f", size = 135521, upload-time = "2026-03-31T16:15:42.758Z" }, + { url = "https://files.pythonhosted.org/packages/9c/7c/ca3a3525aa32ff636ebb1778e77e3587b016ab2edb1b618b36ba96f8f2c0/orjson-3.11.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f89b6d0b3a8d81e1929d3ab3d92bbc225688bd80a770c49432543928fe09ac55", size = 146862, upload-time = "2026-03-31T16:15:44.341Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0c/18a9d7f18b5edd37344d1fd5be17e94dc652c67826ab749c6e5948a78112/orjson-3.11.8-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29c009e7a2ca9ad0ed1376ce20dd692146a5d9fe4310848904b6b4fee5c5c137", size = 132847, upload-time = "2026-03-31T16:15:46.368Z" }, + { url = "https://files.pythonhosted.org/packages/23/91/7e722f352ad67ca573cee44de2a58fb810d0f4eb4e33276c6a557979fd8a/orjson-3.11.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b895b781b3e395c067129d8551655642dfe9437273211d5404e87ac752b53", size = 133637, upload-time = "2026-03-31T16:15:48.123Z" }, + { url = "https://files.pythonhosted.org/packages/af/04/32845ce13ac5bd1046ddb02ac9432ba856cc35f6d74dde95864fe0ad5523/orjson-3.11.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:88006eda83858a9fdf73985ce3804e885c2befb2f506c9a3723cdeb5a2880e3e", size = 141906, upload-time = "2026-03-31T16:15:49.626Z" }, + { url = "https://files.pythonhosted.org/packages/02/5e/c551387ddf2d7106d9039369862245c85738b828844d13b99ccb8d61fd06/orjson-3.11.8-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:55120759e61309af7fcf9e961c6f6af3dde5921cdb3ee863ef63fd9db126cae6", size = 423722, upload-time = "2026-03-31T16:15:51.176Z" }, + { url = "https://files.pythonhosted.org/packages/00/a3/ecfe62434096f8a794d4976728cb59bcfc4a643977f21c2040545d37eb4c/orjson-3.11.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:98bdc6cb889d19bed01de46e67574a2eab61f5cc6b768ed50e8ac68e9d6ffab6", size = 147801, upload-time = "2026-03-31T16:15:52.939Z" }, + { url = "https://files.pythonhosted.org/packages/18/6d/0dce10b9f6643fdc59d99333871a38fa5a769d8e2fc34a18e5d2bfdee900/orjson-3.11.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:708c95f925a43ab9f34625e45dcdadf09ec8a6e7b664a938f2f8d5650f6c090b", size = 136460, upload-time = "2026-03-31T16:15:54.431Z" }, + { url = "https://files.pythonhosted.org/packages/01/d6/6dde4f31842d87099238f1f07b459d24edc1a774d20687187443ab044191/orjson-3.11.8-cp313-cp313-win32.whl", hash = "sha256:01c4e5a6695dc09098f2e6468a251bc4671c50922d4d745aff1a0a33a0cf5b8d", size = 131956, upload-time = "2026-03-31T16:15:56.081Z" }, + { url = "https://files.pythonhosted.org/packages/c1/f9/4e494a56e013db957fb77186b818b916d4695b8fa2aa612364974160e91b/orjson-3.11.8-cp313-cp313-win_amd64.whl", hash = "sha256:c154a35dd1330707450bb4d4e7dd1f17fa6f42267a40c1e8a1daa5e13719b4b8", size = 127410, upload-time = "2026-03-31T16:15:57.54Z" }, + { url = "https://files.pythonhosted.org/packages/57/7f/803203d00d6edb6e9e7eef421d4e1adbb5ea973e40b3533f3cfd9aeb374e/orjson-3.11.8-cp313-cp313-win_arm64.whl", hash = "sha256:4861bde57f4d253ab041e374f44023460e60e71efaa121f3c5f0ed457c3a701e", size = 127338, upload-time = "2026-03-31T16:15:59.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/35/b01910c3d6b85dc882442afe5060cbf719c7d1fc85749294beda23d17873/orjson-3.11.8-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ec795530a73c269a55130498842aaa762e4a939f6ce481a7e986eeaa790e9da4", size = 229171, upload-time = "2026-03-31T16:16:00.651Z" }, + { url = "https://files.pythonhosted.org/packages/c2/56/c9ec97bd11240abef39b9e5d99a15462809c45f677420fd148a6c5e6295e/orjson-3.11.8-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:c492a0e011c0f9066e9ceaa896fbc5b068c54d365fea5f3444b697ee01bc8625", size = 128746, upload-time = "2026-03-31T16:16:02.673Z" }, + { url = "https://files.pythonhosted.org/packages/3b/e4/66d4f30a90de45e2f0cbd9623588e8ae71eef7679dbe2ae954ed6d66a41f/orjson-3.11.8-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:883206d55b1bd5f5679ad5e6ddd3d1a5e3cac5190482927fdb8c78fb699193b5", size = 131867, upload-time = "2026-03-31T16:16:04.342Z" }, + { url = "https://files.pythonhosted.org/packages/19/30/2a645fc9286b928675e43fa2a3a16fb7b6764aa78cc719dc82141e00f30b/orjson-3.11.8-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5774c1fdcc98b2259800b683b19599c133baeb11d60033e2095fd9d4667b82db", size = 124664, upload-time = "2026-03-31T16:16:05.837Z" }, + { url = "https://files.pythonhosted.org/packages/db/44/77b9a86d84a28d52ba3316d77737f6514e17118119ade3f91b639e859029/orjson-3.11.8-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7381c83dd3d4a6347e6635950aa448f54e7b8406a27c7ecb4a37e9f1ae08b", size = 129701, upload-time = "2026-03-31T16:16:07.407Z" }, + { url = "https://files.pythonhosted.org/packages/b3/ea/eff3d9bfe47e9bc6969c9181c58d9f71237f923f9c86a2d2f490cd898c82/orjson-3.11.8-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14439063aebcb92401c11afc68ee4e407258d2752e62d748b6942dad20d2a70d", size = 141202, upload-time = "2026-03-31T16:16:09.48Z" }, + { url = "https://files.pythonhosted.org/packages/52/c8/90d4b4c60c84d62068d0cf9e4d8f0a4e05e76971d133ac0c60d818d4db20/orjson-3.11.8-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa72e71977bff96567b0f500fc5bfd2fdf915f34052c782a4c6ebbdaa97aa858", size = 127194, upload-time = "2026-03-31T16:16:11.02Z" }, + { url = "https://files.pythonhosted.org/packages/8d/c7/ea9e08d1f0ba981adffb629811148b44774d935171e7b3d780ae43c4c254/orjson-3.11.8-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7679bc2f01bb0d219758f1a5f87bb7c8a81c0a186824a393b366876b4948e14f", size = 133639, upload-time = "2026-03-31T16:16:13.434Z" }, + { url = "https://files.pythonhosted.org/packages/6c/8c/ddbbfd6ba59453c8fc7fe1d0e5983895864e264c37481b2a791db635f046/orjson-3.11.8-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:14f7b8fcb35ef403b42fa5ecfa4ed032332a91f3dc7368fbce4184d59e1eae0d", size = 141914, upload-time = "2026-03-31T16:16:14.955Z" }, + { url = "https://files.pythonhosted.org/packages/4e/31/dbfbefec9df060d34ef4962cd0afcb6fa7a9ec65884cb78f04a7859526c3/orjson-3.11.8-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:c2bdf7b2facc80b5e34f48a2d557727d5c5c57a8a450de122ae81fa26a81c1bc", size = 423800, upload-time = "2026-03-31T16:16:16.594Z" }, + { url = "https://files.pythonhosted.org/packages/87/cf/f74e9ae9803d4ab46b163494adba636c6d7ea955af5cc23b8aaa94cfd528/orjson-3.11.8-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ccd7ba1b0605813a0715171d39ec4c314cb97a9c85893c2c5c0c3a3729df38bf", size = 147837, upload-time 
= "2026-03-31T16:16:18.585Z" }, + { url = "https://files.pythonhosted.org/packages/64/e6/9214f017b5db85e84e68602792f742e5dc5249e963503d1b356bee611e01/orjson-3.11.8-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdbc8c9c02463fef4d3c53a9ba3336d05496ec8e1f1c53326a1e4acc11f5c600", size = 136441, upload-time = "2026-03-31T16:16:20.151Z" }, + { url = "https://files.pythonhosted.org/packages/24/dd/3590348818f58f837a75fb969b04cdf187ae197e14d60b5e5a794a38b79d/orjson-3.11.8-cp314-cp314-win32.whl", hash = "sha256:0b57f67710a8cd459e4e54eb96d5f77f3624eba0c661ba19a525807e42eccade", size = 131983, upload-time = "2026-03-31T16:16:21.823Z" }, + { url = "https://files.pythonhosted.org/packages/3f/0f/b6cb692116e05d058f31ceee819c70f097fa9167c82f67fabe7516289abc/orjson-3.11.8-cp314-cp314-win_amd64.whl", hash = "sha256:735e2262363dcbe05c35e3a8869898022af78f89dde9e256924dc02e99fe69ca", size = 127396, upload-time = "2026-03-31T16:16:23.685Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d1/facb5b5051fabb0ef9d26c6544d87ef19a939a9a001198655d0d891062dd/orjson-3.11.8-cp314-cp314-win_arm64.whl", hash = "sha256:6ccdea2c213cf9f3d9490cbd5d427693c870753df41e6cb375bd79bcbafc8817", size = 127330, upload-time = "2026-03-31T16:16:25.496Z" }, ] [[package]] name = "ormsgpack" -version = "1.12.1" +version = "1.12.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/96/34c40d621996c2f377a18decbd3c59f031dde73c3ba47d1e1e8f29a05aaa/ormsgpack-1.12.1.tar.gz", hash = "sha256:a3877fde1e4f27a39f92681a0aab6385af3a41d0c25375d33590ae20410ea2ac", size = 39476, upload-time = "2025-12-14T07:57:43.248Z" } +sdist = { url = "https://files.pythonhosted.org/packages/12/0c/f1761e21486942ab9bb6feaebc610fa074f7c5e496e6962dea5873348077/ormsgpack-1.12.2.tar.gz", hash = "sha256:944a2233640273bee67521795a73cf1e959538e0dfb7ac635505010455e53b33", size = 39031, upload-time = "2026-01-18T20:55:28.023Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/fe/ab9167ca037406b5703add24049cf3e18021a3b16133ea20615b1f160ea4/ormsgpack-1.12.1-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:4d7fb0e1b6fbc701d75269f7405a4f79230a6ce0063fb1092e4f6577e312f86d", size = 376725, upload-time = "2025-12-14T07:57:07.894Z" }, - { url = "https://files.pythonhosted.org/packages/c7/ea/2820e65f506894c459b840d1091ae6e327fde3d5a3f3b002a11a1b9bdf7d/ormsgpack-1.12.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43a9353e2db5b024c91a47d864ef15eaa62d81824cfc7740fed4cef7db738694", size = 202466, upload-time = "2025-12-14T07:57:09.049Z" }, - { url = "https://files.pythonhosted.org/packages/45/8b/def01c13339c5bbec2ee1469ef53e7fadd66c8d775df974ee4def1572515/ormsgpack-1.12.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc8fe866b7706fc25af0adf1f600bc06ece5b15ca44e34641327198b821e5c3c", size = 210748, upload-time = "2025-12-14T07:57:10.074Z" }, - { url = "https://files.pythonhosted.org/packages/5d/d2/bf350c92f7f067dd9484499705f2d8366d8d9008a670e3d1d0add1908f85/ormsgpack-1.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:813755b5f598a78242042e05dfd1ada4e769e94b98c9ab82554550f97ff4d641", size = 211510, upload-time = "2025-12-14T07:57:11.165Z" }, - { url = "https://files.pythonhosted.org/packages/74/92/9d689bcb95304a6da26c4d59439c350940c25d1b35f146d402ccc6344c51/ormsgpack-1.12.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:8eea2a13536fae45d78f93f2cc846c9765c7160c85f19cfefecc20873c137cdd", size = 386237, upload-time = "2025-12-14T07:57:12.306Z" }, - { url = "https://files.pythonhosted.org/packages/17/fe/bd3107547f8b6129265dd957f40b9cd547d2445db2292aacb13335a7ea89/ormsgpack-1.12.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7a02ebda1a863cbc604740e76faca8eee1add322db2dcbe6cf32669fffdff65c", size = 479589, upload-time = "2025-12-14T07:57:13.475Z" }, - { url = "https://files.pythonhosted.org/packages/c1/7c/e8e5cc9edb967d44f6f85e9ebdad440b59af3fae00b137a4327dc5aed9bb/ormsgpack-1.12.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3c0bd63897c439931cdf29348e5e6e8c330d529830e848d10767615c0f3d1b82", size = 388077, upload-time = "2025-12-14T07:57:14.551Z" }, - { url = "https://files.pythonhosted.org/packages/35/6b/5031797e43b58506f28a8760b26dc23f2620fb4f2200c4c1b3045603e67e/ormsgpack-1.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:362f2e812f8d7035dc25a009171e09d7cc97cb30d3c9e75a16aeae00ca3c1dcf", size = 116190, upload-time = "2025-12-14T07:57:15.575Z" }, - { url = "https://files.pythonhosted.org/packages/1e/fd/9f43ea6425e383a6b2dbfafebb06fd60e8d68c700ef715adfbcdb499f75d/ormsgpack-1.12.1-cp312-cp312-win_arm64.whl", hash = "sha256:6190281e381db2ed0045052208f47a995ccf61eed48f1215ae3cce3fbccd59c5", size = 109990, upload-time = "2025-12-14T07:57:16.419Z" }, - { url = "https://files.pythonhosted.org/packages/11/42/f110dfe7cf23a52a82e23eb23d9a6a76ae495447d474686dfa758f3d71d6/ormsgpack-1.12.1-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:9663d6b3ecc917c063d61a99169ce196a80f3852e541ae404206836749459279", size = 376746, upload-time = "2025-12-14T07:57:17.699Z" }, - { url = "https://files.pythonhosted.org/packages/11/76/b386e508a8ae207daec240201a81adb26467bf99b163560724e86bd9ff33/ormsgpack-1.12.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32e85cfbaf01a94a92520e7fe7851cfcfe21a5698299c28ab86194895f9b9233", size = 202489, upload-time = "2025-12-14T07:57:18.807Z" }, - { url = "https://files.pythonhosted.org/packages/ea/0e/5db7a63f387149024572daa3d9512fe8fb14bf4efa0722d6d491bed280e7/ormsgpack-1.12.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dabfd2c24b59c7c69870a5ecee480dfae914a42a0c2e7c9d971cf531e2ba471a", size = 210757, upload-time = "2025-12-14T07:57:19.893Z" }, - { url = "https://files.pythonhosted.org/packages/64/79/3a9899e57cb57430bd766fc1b4c9ad410cb2ba6070bc8cf6301e7d385768/ormsgpack-1.12.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51bbf2b64afeded34ccd8e25402e4bca038757913931fa0d693078d75563f6f9", size = 211518, upload-time = "2025-12-14T07:57:20.972Z" }, - { url = "https://files.pythonhosted.org/packages/d7/cd/4f41710ae9fe50d7fcbe476793b3c487746d0e1cc194cc0fee42ff6d989b/ormsgpack-1.12.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9959a71dde1bd0ced84af17facc06a8afada495a34e9cb1bad8e9b20d4c59cef", size = 386251, upload-time = "2025-12-14T07:57:22.099Z" }, - { url = "https://files.pythonhosted.org/packages/bf/54/ba0c97d6231b1f01daafaa520c8cce1e1b7fceaae6fdc1c763925874a7de/ormsgpack-1.12.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:e9be0e3b62d758f21f5b20e0e06b3a240ec546c4a327bf771f5825462aa74714", size = 479607, upload-time = "2025-12-14T07:57:23.525Z" }, - { url = "https://files.pythonhosted.org/packages/18/75/19a9a97a462776d525baf41cfb7072734528775f0a3d5fbfab3aa7756b9b/ormsgpack-1.12.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:a29d49ab7fdd77ea787818e60cb4ef491708105b9c4c9b0f919201625eb036b5", size = 388062, upload-time = "2025-12-14T07:57:24.616Z" }, - { url = "https://files.pythonhosted.org/packages/a8/6a/ec26e3f44e9632ecd2f43638b7b37b500eaea5d79cab984ad0b94be14f82/ormsgpack-1.12.1-cp313-cp313-win_amd64.whl", hash = "sha256:c418390b47a1d367e803f6c187f77e4d67c7ae07ba962e3a4a019001f4b0291a", size = 116195, upload-time = "2025-12-14T07:57:25.626Z" }, - { url = "https://files.pythonhosted.org/packages/7d/64/bfa5f4a34d0f15c6aba1b73e73f7441a66d635bd03249d334a4796b7a924/ormsgpack-1.12.1-cp313-cp313-win_arm64.whl", hash = "sha256:cfa22c91cffc10a7fbd43729baff2de7d9c28cef2509085a704168ae31f02568", size = 109986, upload-time = "2025-12-14T07:57:26.569Z" }, - { url = "https://files.pythonhosted.org/packages/87/0e/78e5697164e3223b9b216c13e99f1acbc1ee9833490d68842b13da8ba883/ormsgpack-1.12.1-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:b93c91efb1a70751a1902a5b43b27bd8fd38e0ca0365cf2cde2716423c15c3a6", size = 376758, upload-time = "2025-12-14T07:57:27.641Z" }, - { url = "https://files.pythonhosted.org/packages/2c/0e/3a3cbb64703263d7bbaed7effa3ce78cb9add360a60aa7c544d7df28b641/ormsgpack-1.12.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf0ea0389167b5fa8d2933dd3f33e887ec4ba68f89c25214d7eec4afd746d22", size = 202487, upload-time = "2025-12-14T07:57:29.051Z" }, - { url = "https://files.pythonhosted.org/packages/d7/2c/807ebe2b77995599bbb1dec8c3f450d5d7dddee14ce3e1e71dc60e2e2a74/ormsgpack-1.12.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4c29af837f35af3375070689e781161e7cf019eb2f7cd641734ae45cd001c0d", size = 210853, upload-time = "2025-12-14T07:57:30.508Z" }, - { url = "https://files.pythonhosted.org/packages/25/57/2cdfc354e3ad8e847628f511f4d238799d90e9e090941e50b9d5ba955ae2/ormsgpack-1.12.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:336fc65aa0fe65896a3dabaae31e332a0a98b4a00ad7b0afde21a7505fd23ff3", size = 211545, upload-time = "2025-12-14T07:57:31.585Z" }, - { url = "https://files.pythonhosted.org/packages/76/1d/c6fda560e4a8ff865b3aec8a86f7c95ab53f4532193a6ae4ab9db35f85aa/ormsgpack-1.12.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:940f60aabfefe71dd6b82cb33f4ff10b2e7f5fcfa5f103cdb0a23b6aae4c713c", size = 386333, upload-time = "2025-12-14T07:57:32.957Z" }, - { url = "https://files.pythonhosted.org/packages/fc/3e/715081b36fceb8b497c68b87d384e1cc6d9c9c130ce3b435634d3d785b86/ormsgpack-1.12.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:596ad9e1b6d4c95595c54aaf49b1392609ca68f562ce06f4f74a5bc4053bcda4", size = 479701, upload-time = "2025-12-14T07:57:34.686Z" }, - { url = "https://files.pythonhosted.org/packages/6d/cf/01ad04def42b3970fc1a302c07f4b46339edf62ef9650247097260471f40/ormsgpack-1.12.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:575210e8fcbc7b0375026ba040a5eef223e9f66a4453d9623fc23282ae09c3c8", size = 388148, upload-time = "2025-12-14T07:57:35.771Z" }, - { url = "https://files.pythonhosted.org/packages/15/91/1fff2fc2b5943c740028f339154e7103c8f2edf1a881d9fbba2ce11c3b1d/ormsgpack-1.12.1-cp314-cp314-win_amd64.whl", hash = "sha256:647daa3718572280893456be44c60aea6690b7f2edc54c55648ee66e8f06550f", size = 116201, upload-time = "2025-12-14T07:57:36.763Z" }, - { url = "https://files.pythonhosted.org/packages/ed/66/142b542aed3f96002c7d1c33507ca6e1e0d0a42b9253ab27ef7ed5793bd9/ormsgpack-1.12.1-cp314-cp314-win_arm64.whl", hash = 
"sha256:a8b3ab762a6deaf1b6490ab46dda0c51528cf8037e0246c40875c6fe9e37b699", size = 110029, upload-time = "2025-12-14T07:57:37.703Z" }, - { url = "https://files.pythonhosted.org/packages/38/b3/ef4494438c90359e1547eaed3c5ec46e2c431d59a3de2af4e70ebd594c49/ormsgpack-1.12.1-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:12087214e436c1f6c28491949571abea759a63111908c4f7266586d78144d7a8", size = 376777, upload-time = "2025-12-14T07:57:38.795Z" }, - { url = "https://files.pythonhosted.org/packages/05/a0/1149a7163f8b0dfbc64bf9099b6f16d102ad3b03bcc11afee198d751da2d/ormsgpack-1.12.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e6d54c14cf86ef13f10ccade94d1e7de146aa9b17d371e18b16e95f329393b7", size = 202490, upload-time = "2025-12-14T07:57:40.168Z" }, - { url = "https://files.pythonhosted.org/packages/68/82/f2ec5e758d6a7106645cca9bb7137d98bce5d363789fa94075be6572057c/ormsgpack-1.12.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f3584d07882b7ea2a1a589f795a3af97fe4c2932b739408e6d1d9d286cad862", size = 211733, upload-time = "2025-12-14T07:57:42.253Z" }, + { url = "https://files.pythonhosted.org/packages/4c/36/16c4b1921c308a92cef3bf6663226ae283395aa0ff6e154f925c32e91ff5/ormsgpack-1.12.2-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7a29d09b64b9694b588ff2f80e9826bdceb3a2b91523c5beae1fab27d5c940e7", size = 378618, upload-time = "2026-01-18T20:55:50.835Z" }, + { url = "https://files.pythonhosted.org/packages/c0/68/468de634079615abf66ed13bb5c34ff71da237213f29294363beeeca5306/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b39e629fd2e1c5b2f46f99778450b59454d1f901bc507963168985e79f09c5d", size = 203186, upload-time = "2026-01-18T20:56:11.163Z" }, + { url = "https://files.pythonhosted.org/packages/73/a9/d756e01961442688b7939bacd87ce13bfad7d26ce24f910f6028178b2cc8/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:958dcb270d30a7cb633a45ee62b9444433fa571a752d2ca484efdac07480876e", size = 210738, upload-time = "2026-01-18T20:56:09.181Z" }, + { url = "https://files.pythonhosted.org/packages/7b/ba/795b1036888542c9113269a3f5690ab53dd2258c6fb17676ac4bd44fcf94/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d379d72b6c5e964851c77cfedfb386e474adee4fd39791c2c5d9efb53505cc", size = 212569, upload-time = "2026-01-18T20:56:06.135Z" }, + { url = "https://files.pythonhosted.org/packages/6c/aa/bff73c57497b9e0cba8837c7e4bcab584b1a6dbc91a5dd5526784a5030c8/ormsgpack-1.12.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8463a3fc5f09832e67bdb0e2fda6d518dc4281b133166146a67f54c08496442e", size = 387166, upload-time = "2026-01-18T20:55:36.738Z" }, + { url = "https://files.pythonhosted.org/packages/d3/cf/f8283cba44bcb7b14f97b6274d449db276b3a86589bdb363169b51bc12de/ormsgpack-1.12.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:eddffb77eff0bad4e67547d67a130604e7e2dfbb7b0cde0796045be4090f35c6", size = 482498, upload-time = "2026-01-18T20:55:29.626Z" }, + { url = "https://files.pythonhosted.org/packages/05/be/71e37b852d723dfcbe952ad04178c030df60d6b78eba26bfd14c9a40575e/ormsgpack-1.12.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fcd55e5f6ba0dbce624942adf9f152062135f991a0126064889f68eb850de0dd", size = 425518, upload-time = "2026-01-18T20:55:49.556Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/0c/9803aa883d18c7ef197213cd2cbf73ba76472a11fe100fb7dab2884edf48/ormsgpack-1.12.2-cp312-cp312-win_amd64.whl", hash = "sha256:d024b40828f1dde5654faebd0d824f9cc29ad46891f626272dd5bfd7af2333a4", size = 117462, upload-time = "2026-01-18T20:55:47.726Z" }, + { url = "https://files.pythonhosted.org/packages/c8/9e/029e898298b2cc662f10d7a15652a53e3b525b1e7f07e21fef8536a09bb8/ormsgpack-1.12.2-cp312-cp312-win_arm64.whl", hash = "sha256:da538c542bac7d1c8f3f2a937863dba36f013108ce63e55745941dda4b75dbb6", size = 111559, upload-time = "2026-01-18T20:55:54.273Z" }, + { url = "https://files.pythonhosted.org/packages/eb/29/bb0eba3288c0449efbb013e9c6f58aea79cf5cb9ee1921f8865f04c1a9d7/ormsgpack-1.12.2-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5ea60cb5f210b1cfbad8c002948d73447508e629ec375acb82910e3efa8ff355", size = 378661, upload-time = "2026-01-18T20:55:57.765Z" }, + { url = "https://files.pythonhosted.org/packages/6e/31/5efa31346affdac489acade2926989e019e8ca98129658a183e3add7af5e/ormsgpack-1.12.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3601f19afdbea273ed70b06495e5794606a8b690a568d6c996a90d7255e51c1", size = 203194, upload-time = "2026-01-18T20:56:08.252Z" }, + { url = "https://files.pythonhosted.org/packages/eb/56/d0087278beef833187e0167f8527235ebe6f6ffc2a143e9de12a98b1ce87/ormsgpack-1.12.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:29a9f17a3dac6054c0dce7925e0f4995c727f7c41859adf9b5572180f640d172", size = 210778, upload-time = "2026-01-18T20:55:17.694Z" }, + { url = "https://files.pythonhosted.org/packages/1c/a2/072343e1413d9443e5a252a8eb591c2d5b1bffbe5e7bfc78c069361b92eb/ormsgpack-1.12.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39c1bd2092880e413902910388be8715f70b9f15f20779d44e673033a6146f2d", size = 212592, upload-time = "2026-01-18T20:55:32.747Z" }, + { url = "https://files.pythonhosted.org/packages/a2/8b/a0da3b98a91d41187a63b02dda14267eefc2a74fcb43cc2701066cf1510e/ormsgpack-1.12.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:50b7249244382209877deedeee838aef1542f3d0fc28b8fe71ca9d7e1896a0d7", size = 387164, upload-time = "2026-01-18T20:55:40.853Z" }, + { url = "https://files.pythonhosted.org/packages/19/bb/6d226bc4cf9fc20d8eb1d976d027a3f7c3491e8f08289a2e76abe96a65f3/ormsgpack-1.12.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:5af04800d844451cf102a59c74a841324868d3f1625c296a06cc655c542a6685", size = 482516, upload-time = "2026-01-18T20:55:42.033Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f1/bb2c7223398543dedb3dbf8bb93aaa737b387de61c5feaad6f908841b782/ormsgpack-1.12.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cec70477d4371cd524534cd16472d8b9cc187e0e3043a8790545a9a9b296c258", size = 425539, upload-time = "2026-01-18T20:55:24.727Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e8/0fb45f57a2ada1fed374f7494c8cd55e2f88ccd0ab0a669aa3468716bf5f/ormsgpack-1.12.2-cp313-cp313-win_amd64.whl", hash = "sha256:21f4276caca5c03a818041d637e4019bc84f9d6ca8baa5ea03e5cc8bf56140e9", size = 117459, upload-time = "2026-01-18T20:55:56.876Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d4/0cfeea1e960d550a131001a7f38a5132c7ae3ebde4c82af1f364ccc5d904/ormsgpack-1.12.2-cp313-cp313-win_arm64.whl", hash = "sha256:baca4b6773d20a82e36d6fd25f341064244f9f86a13dead95dd7d7f996f51709", size = 111577, upload-time = "2026-01-18T20:55:43.605Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/16/24d18851334be09c25e87f74307c84950f18c324a4d3c0b41dabdbf19c29/ormsgpack-1.12.2-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:bc68dd5915f4acf66ff2010ee47c8906dc1cf07399b16f4089f8c71733f6e36c", size = 378717, upload-time = "2026-01-18T20:55:26.164Z" }, + { url = "https://files.pythonhosted.org/packages/b5/a2/88b9b56f83adae8032ac6a6fa7f080c65b3baf9b6b64fd3d37bd202991d4/ormsgpack-1.12.2-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46d084427b4132553940070ad95107266656cb646ea9da4975f85cb1a6676553", size = 203183, upload-time = "2026-01-18T20:55:18.815Z" }, + { url = "https://files.pythonhosted.org/packages/a9/80/43e4555963bf602e5bdc79cbc8debd8b6d5456c00d2504df9775e74b450b/ormsgpack-1.12.2-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c010da16235806cf1d7bc4c96bf286bfa91c686853395a299b3ddb49499a3e13", size = 210814, upload-time = "2026-01-18T20:55:33.973Z" }, + { url = "https://files.pythonhosted.org/packages/78/e1/7cfbf28de8bca6efe7e525b329c31277d1b64ce08dcba723971c241a9d60/ormsgpack-1.12.2-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18867233df592c997154ff942a6503df274b5ac1765215bceba7a231bea2745d", size = 212634, upload-time = "2026-01-18T20:55:28.634Z" }, + { url = "https://files.pythonhosted.org/packages/95/f8/30ae5716e88d792a4e879debee195653c26ddd3964c968594ddef0a3cc7e/ormsgpack-1.12.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b009049086ddc6b8f80c76b3955df1aa22a5fbd7673c525cd63bf91f23122ede", size = 387139, upload-time = "2026-01-18T20:56:02.013Z" }, + { url = "https://files.pythonhosted.org/packages/dc/81/aee5b18a3e3a0e52f718b37ab4b8af6fae0d9d6a65103036a90c2a8ffb5d/ormsgpack-1.12.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:1dcc17d92b6390d4f18f937cf0b99054824a7815818012ddca925d6e01c2e49e", size = 482578, upload-time = "2026-01-18T20:55:35.117Z" }, + { url = "https://files.pythonhosted.org/packages/bd/17/71c9ba472d5d45f7546317f467a5fc941929cd68fb32796ca3d13dcbaec2/ormsgpack-1.12.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f04b5e896d510b07c0ad733d7fce2d44b260c5e6c402d272128f8941984e4285", size = 425539, upload-time = "2026-01-18T20:56:04.009Z" }, + { url = "https://files.pythonhosted.org/packages/2e/a6/ac99cd7fe77e822fed5250ff4b86fa66dd4238937dd178d2299f10b69816/ormsgpack-1.12.2-cp314-cp314-win_amd64.whl", hash = "sha256:ae3aba7eed4ca7cb79fd3436eddd29140f17ea254b91604aa1eb19bfcedb990f", size = 117493, upload-time = "2026-01-18T20:56:07.343Z" }, + { url = "https://files.pythonhosted.org/packages/3a/67/339872846a1ae4592535385a1c1f93614138566d7af094200c9c3b45d1e5/ormsgpack-1.12.2-cp314-cp314-win_arm64.whl", hash = "sha256:118576ea6006893aea811b17429bfc561b4778fad393f5f538c84af70b01260c", size = 111579, upload-time = "2026-01-18T20:55:21.161Z" }, + { url = "https://files.pythonhosted.org/packages/49/c2/6feb972dc87285ad381749d3882d8aecbde9f6ecf908dd717d33d66df095/ormsgpack-1.12.2-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7121b3d355d3858781dc40dafe25a32ff8a8242b9d80c692fd548a4b1f7fd3c8", size = 378721, upload-time = "2026-01-18T20:55:52.12Z" }, + { url = "https://files.pythonhosted.org/packages/a3/9a/900a6b9b413e0f8a471cf07830f9cf65939af039a362204b36bd5b581d8b/ormsgpack-1.12.2-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ee766d2e78251b7a63daf1cddfac36a73562d3ddef68cacfb41b2af64698033", size 
= 203170, upload-time = "2026-01-18T20:55:44.469Z" }, + { url = "https://files.pythonhosted.org/packages/87/4c/27a95466354606b256f24fad464d7c97ab62bce6cc529dd4673e1179b8fb/ormsgpack-1.12.2-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292410a7d23de9b40444636b9b8f1e4e4b814af7f1ef476e44887e52a123f09d", size = 212816, upload-time = "2026-01-18T20:55:23.501Z" }, + { url = "https://files.pythonhosted.org/packages/73/cd/29cee6007bddf7a834e6cd6f536754c0535fcb939d384f0f37a38b1cddb8/ormsgpack-1.12.2-cp314-cp314t-win_amd64.whl", hash = "sha256:837dd316584485b72ef451d08dd3e96c4a11d12e4963aedb40e08f89685d8ec2", size = 117232, upload-time = "2026-01-18T20:55:45.448Z" }, ] [[package]] name = "packaging" -version = "25.0" +version = "26.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/f1/e7a6dd94a8d4a5626c03e4e99c87f241ba9e350cd9e6d75123f992427270/packaging-26.2.tar.gz", hash = "sha256:ff452ff5a3e828ce110190feff1178bb1f2ea2281fa2075aadb987c2fb221661", size = 228134, upload-time = "2026-04-24T20:15:23.917Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, + { url = "https://files.pythonhosted.org/packages/df/b2/87e62e8c3e2f4b32e5fe99e0b86d576da1312593b39f47d8ceef365e95ed/packaging-26.2-py3-none-any.whl", hash = "sha256:5fc45236b9446107ff2415ce77c807cee2862cb6fac22b8a73826d0693b0980e", size = 100195, upload-time = "2026-04-24T20:15:22.081Z" }, ] [[package]] name = "pandas" -version = "3.0.0" +version = "3.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, { name = "python-dateutil" }, { name = "tzdata", marker = "sys_platform == 'emscripten' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/de/da/b1dc0481ab8d55d0f46e343cfe67d4551a0e14fcee52bd38ca1bd73258d8/pandas-3.0.0.tar.gz", hash = "sha256:0facf7e87d38f721f0af46fe70d97373a37701b1c09f7ed7aeeb292ade5c050f", size = 4633005, upload-time = "2026-01-21T15:52:04.726Z" } +sdist = { url = "https://files.pythonhosted.org/packages/da/99/b342345300f13440fe9fe385c3c481e2d9a595ee3bab4d3219247ac94e9a/pandas-3.0.2.tar.gz", hash = "sha256:f4753e73e34c8d83221ba58f232433fca2748be8b18dbca02d242ed153945043", size = 4645855, upload-time = "2026-03-31T06:48:30.816Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0b/38/db33686f4b5fa64d7af40d96361f6a4615b8c6c8f1b3d334eee46ae6160e/pandas-3.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9803b31f5039b3c3b10cc858c5e40054adb4b29b4d81cb2fd789f4121c8efbcd", size = 10334013, upload-time = "2026-01-21T15:50:34.771Z" }, - { url = "https://files.pythonhosted.org/packages/a5/7b/9254310594e9774906bacdd4e732415e1f86ab7dbb4b377ef9ede58cd8ec/pandas-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14c2a4099cd38a1d18ff108168ea417909b2dea3bd1ebff2ccf28ddb6a74d740", size = 9874154, upload-time = "2026-01-21T15:50:36.67Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/d4/726c5a67a13bc66643e66d2e9ff115cead482a44fc56991d0c4014f15aaf/pandas-3.0.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d257699b9a9960e6125686098d5714ac59d05222bef7a5e6af7a7fd87c650801", size = 10384433, upload-time = "2026-01-21T15:50:39.132Z" }, - { url = "https://files.pythonhosted.org/packages/bf/2e/9211f09bedb04f9832122942de8b051804b31a39cfbad199a819bb88d9f3/pandas-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:69780c98f286076dcafca38d8b8eee1676adf220199c0a39f0ecbf976b68151a", size = 10864519, upload-time = "2026-01-21T15:50:41.043Z" }, - { url = "https://files.pythonhosted.org/packages/00/8d/50858522cdc46ac88b9afdc3015e298959a70a08cd21e008a44e9520180c/pandas-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4a66384f017240f3858a4c8a7cf21b0591c3ac885cddb7758a589f0f71e87ebb", size = 11394124, upload-time = "2026-01-21T15:50:43.377Z" }, - { url = "https://files.pythonhosted.org/packages/86/3f/83b2577db02503cd93d8e95b0f794ad9d4be0ba7cb6c8bcdcac964a34a42/pandas-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be8c515c9bc33989d97b89db66ea0cececb0f6e3c2a87fcc8b69443a6923e95f", size = 11920444, upload-time = "2026-01-21T15:50:45.932Z" }, - { url = "https://files.pythonhosted.org/packages/64/2d/4f8a2f192ed12c90a0aab47f5557ece0e56b0370c49de9454a09de7381b2/pandas-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a453aad8c4f4e9f166436994a33884442ea62aa8b27d007311e87521b97246e1", size = 9730970, upload-time = "2026-01-21T15:50:47.962Z" }, - { url = "https://files.pythonhosted.org/packages/d4/64/ff571be435cf1e643ca98d0945d76732c0b4e9c37191a89c8550b105eed1/pandas-3.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:da768007b5a33057f6d9053563d6b74dd6d029c337d93c6d0d22a763a5c2ecc0", size = 9041950, upload-time = "2026-01-21T15:50:50.422Z" }, - { url = "https://files.pythonhosted.org/packages/6f/fa/7f0ac4ca8877c57537aaff2a842f8760e630d8e824b730eb2e859ffe96ca/pandas-3.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b78d646249b9a2bc191040988c7bb524c92fa8534fb0898a0741d7e6f2ffafa6", size = 10307129, upload-time = "2026-01-21T15:50:52.877Z" }, - { url = "https://files.pythonhosted.org/packages/6f/11/28a221815dcea4c0c9414dfc845e34a84a6a7dabc6da3194498ed5ba4361/pandas-3.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bc9cba7b355cb4162442a88ce495e01cb605f17ac1e27d6596ac963504e0305f", size = 9850201, upload-time = "2026-01-21T15:50:54.807Z" }, - { url = "https://files.pythonhosted.org/packages/ba/da/53bbc8c5363b7e5bd10f9ae59ab250fc7a382ea6ba08e4d06d8694370354/pandas-3.0.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c9a1a149aed3b6c9bf246033ff91e1b02d529546c5d6fb6b74a28fea0cf4c70", size = 10354031, upload-time = "2026-01-21T15:50:57.463Z" }, - { url = "https://files.pythonhosted.org/packages/f7/a3/51e02ebc2a14974170d51e2410dfdab58870ea9bcd37cda15bd553d24dc4/pandas-3.0.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95683af6175d884ee89471842acfca29172a85031fccdabc35e50c0984470a0e", size = 10861165, upload-time = "2026-01-21T15:50:59.32Z" }, - { url = "https://files.pythonhosted.org/packages/a5/fe/05a51e3cac11d161472b8297bd41723ea98013384dd6d76d115ce3482f9b/pandas-3.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1fbbb5a7288719e36b76b4f18d46ede46e7f916b6c8d9915b756b0a6c3f792b3", size = 11359359, upload-time = "2026-01-21T15:51:02.014Z" }, - { url = 
"https://files.pythonhosted.org/packages/ee/56/ba620583225f9b85a4d3e69c01df3e3870659cc525f67929b60e9f21dcd1/pandas-3.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e8b9808590fa364416b49b2a35c1f4cf2785a6c156935879e57f826df22038e", size = 11912907, upload-time = "2026-01-21T15:51:05.175Z" }, - { url = "https://files.pythonhosted.org/packages/c9/8c/c6638d9f67e45e07656b3826405c5cc5f57f6fd07c8b2572ade328c86e22/pandas-3.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:98212a38a709feb90ae658cb6227ea3657c22ba8157d4b8f913cd4c950de5e7e", size = 9732138, upload-time = "2026-01-21T15:51:07.569Z" }, - { url = "https://files.pythonhosted.org/packages/7b/bf/bd1335c3bf1770b6d8fed2799993b11c4971af93bb1b729b9ebbc02ca2ec/pandas-3.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:177d9df10b3f43b70307a149d7ec49a1229a653f907aa60a48f1877d0e6be3be", size = 9033568, upload-time = "2026-01-21T15:51:09.484Z" }, - { url = "https://files.pythonhosted.org/packages/8e/c6/f5e2171914d5e29b9171d495344097d54e3ffe41d2d85d8115baba4dc483/pandas-3.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2713810ad3806767b89ad3b7b69ba153e1c6ff6d9c20f9c2140379b2a98b6c98", size = 10741936, upload-time = "2026-01-21T15:51:11.693Z" }, - { url = "https://files.pythonhosted.org/packages/51/88/9a0164f99510a1acb9f548691f022c756c2314aad0d8330a24616c14c462/pandas-3.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:15d59f885ee5011daf8335dff47dcb8a912a27b4ad7826dc6cbe809fd145d327", size = 10393884, upload-time = "2026-01-21T15:51:14.197Z" }, - { url = "https://files.pythonhosted.org/packages/e0/53/b34d78084d88d8ae2b848591229da8826d1e65aacf00b3abe34023467648/pandas-3.0.0-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24e6547fb64d2c92665dd2adbfa4e85fa4fd70a9c070e7cfb03b629a0bbab5eb", size = 10310740, upload-time = "2026-01-21T15:51:16.093Z" }, - { url = "https://files.pythonhosted.org/packages/5b/d3/bee792e7c3d6930b74468d990604325701412e55d7aaf47460a22311d1a5/pandas-3.0.0-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:48ee04b90e2505c693d3f8e8f524dab8cb8aaf7ddcab52c92afa535e717c4812", size = 10700014, upload-time = "2026-01-21T15:51:18.818Z" }, - { url = "https://files.pythonhosted.org/packages/55/db/2570bc40fb13aaed1cbc3fbd725c3a60ee162477982123c3adc8971e7ac1/pandas-3.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:66f72fb172959af42a459e27a8d8d2c7e311ff4c1f7db6deb3b643dbc382ae08", size = 11323737, upload-time = "2026-01-21T15:51:20.784Z" }, - { url = "https://files.pythonhosted.org/packages/bc/2e/297ac7f21c8181b62a4cccebad0a70caf679adf3ae5e83cb676194c8acc3/pandas-3.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4a4a400ca18230976724a5066f20878af785f36c6756e498e94c2a5e5d57779c", size = 11771558, upload-time = "2026-01-21T15:51:22.977Z" }, - { url = "https://files.pythonhosted.org/packages/0a/46/e1c6876d71c14332be70239acce9ad435975a80541086e5ffba2f249bcf6/pandas-3.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:940eebffe55528074341a5a36515f3e4c5e25e958ebbc764c9502cfc35ba3faa", size = 10473771, upload-time = "2026-01-21T15:51:25.285Z" }, - { url = "https://files.pythonhosted.org/packages/c0/db/0270ad9d13c344b7a36fa77f5f8344a46501abf413803e885d22864d10bf/pandas-3.0.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:597c08fb9fef0edf1e4fa2f9828dd27f3d78f9b8c9b4a748d435ffc55732310b", size = 10312075, upload-time = "2026-01-21T15:51:28.5Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/9f/c176f5e9717f7c91becfe0f55a52ae445d3f7326b4a2cf355978c51b7913/pandas-3.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:447b2d68ac5edcbf94655fe909113a6dba6ef09ad7f9f60c80477825b6c489fe", size = 9900213, upload-time = "2026-01-21T15:51:30.955Z" }, - { url = "https://files.pythonhosted.org/packages/d9/e7/63ad4cc10b257b143e0a5ebb04304ad806b4e1a61c5da25f55896d2ca0f4/pandas-3.0.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:debb95c77ff3ed3ba0d9aa20c3a2f19165cc7956362f9873fce1ba0a53819d70", size = 10428768, upload-time = "2026-01-21T15:51:33.018Z" }, - { url = "https://files.pythonhosted.org/packages/9e/0e/4e4c2d8210f20149fd2248ef3fff26623604922bd564d915f935a06dd63d/pandas-3.0.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fedabf175e7cd82b69b74c30adbaa616de301291a5231138d7242596fc296a8d", size = 10882954, upload-time = "2026-01-21T15:51:35.287Z" }, - { url = "https://files.pythonhosted.org/packages/c6/60/c9de8ac906ba1f4d2250f8a951abe5135b404227a55858a75ad26f84db47/pandas-3.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:412d1a89aab46889f3033a386912efcdfa0f1131c5705ff5b668dda88305e986", size = 11430293, upload-time = "2026-01-21T15:51:37.57Z" }, - { url = "https://files.pythonhosted.org/packages/a1/69/806e6637c70920e5787a6d6896fd707f8134c2c55cd761e7249a97b7dc5a/pandas-3.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e979d22316f9350c516479dd3a92252be2937a9531ed3a26ec324198a99cdd49", size = 11952452, upload-time = "2026-01-21T15:51:39.618Z" }, - { url = "https://files.pythonhosted.org/packages/cb/de/918621e46af55164c400ab0ef389c9d969ab85a43d59ad1207d4ddbe30a5/pandas-3.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:083b11415b9970b6e7888800c43c82e81a06cd6b06755d84804444f0007d6bb7", size = 9851081, upload-time = "2026-01-21T15:51:41.758Z" }, - { url = "https://files.pythonhosted.org/packages/91/a1/3562a18dd0bd8c73344bfa26ff90c53c72f827df119d6d6b1dacc84d13e3/pandas-3.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:5db1e62cb99e739fa78a28047e861b256d17f88463c76b8dafc7c1338086dca8", size = 9174610, upload-time = "2026-01-21T15:51:44.312Z" }, - { url = "https://files.pythonhosted.org/packages/ce/26/430d91257eaf366f1737d7a1c158677caaf6267f338ec74e3a1ec444111c/pandas-3.0.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:697b8f7d346c68274b1b93a170a70974cdc7d7354429894d5927c1effdcccd73", size = 10761999, upload-time = "2026-01-21T15:51:46.899Z" }, - { url = "https://files.pythonhosted.org/packages/ec/1a/954eb47736c2b7f7fe6a9d56b0cb6987773c00faa3c6451a43db4beb3254/pandas-3.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8cb3120f0d9467ed95e77f67a75e030b67545bcfa08964e349252d674171def2", size = 10410279, upload-time = "2026-01-21T15:51:48.89Z" }, - { url = "https://files.pythonhosted.org/packages/20/fc/b96f3a5a28b250cd1b366eb0108df2501c0f38314a00847242abab71bb3a/pandas-3.0.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33fd3e6baa72899746b820c31e4b9688c8e1b7864d7aec2de7ab5035c285277a", size = 10330198, upload-time = "2026-01-21T15:51:51.015Z" }, - { url = "https://files.pythonhosted.org/packages/90/b3/d0e2952f103b4fbef1ef22d0c2e314e74fc9064b51cee30890b5e3286ee6/pandas-3.0.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8942e333dc67ceda1095227ad0febb05a3b36535e520154085db632c40ad084", size = 10728513, upload-time = "2026-01-21T15:51:53.387Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/81/832894f286df828993dc5fd61c63b231b0fb73377e99f6c6c369174cf97e/pandas-3.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:783ac35c4d0fe0effdb0d67161859078618b1b6587a1af15928137525217a721", size = 11345550, upload-time = "2026-01-21T15:51:55.329Z" }, - { url = "https://files.pythonhosted.org/packages/34/a0/ed160a00fb4f37d806406bc0a79a8b62fe67f29d00950f8d16203ff3409b/pandas-3.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:125eb901e233f155b268bbef9abd9afb5819db74f0e677e89a61b246228c71ac", size = 11799386, upload-time = "2026-01-21T15:51:57.457Z" }, - { url = "https://files.pythonhosted.org/packages/36/c8/2ac00d7255252c5e3cf61b35ca92ca25704b0188f7454ca4aec08a33cece/pandas-3.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b86d113b6c109df3ce0ad5abbc259fe86a1bd4adfd4a31a89da42f84f65509bb", size = 10873041, upload-time = "2026-01-21T15:52:00.034Z" }, - { url = "https://files.pythonhosted.org/packages/e6/3f/a80ac00acbc6b35166b42850e98a4f466e2c0d9c64054161ba9620f95680/pandas-3.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:1c39eab3ad38f2d7a249095f0a3d8f8c22cc0f847e98ccf5bbe732b272e2d9fa", size = 9441003, upload-time = "2026-01-21T15:52:02.281Z" }, + { url = "https://files.pythonhosted.org/packages/f3/b0/c20bd4d6d3f736e6bd6b55794e9cd0a617b858eaad27c8f410ea05d953b7/pandas-3.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:232a70ebb568c0c4d2db4584f338c1577d81e3af63292208d615907b698a0f18", size = 10347921, upload-time = "2026-03-31T06:46:33.36Z" }, + { url = "https://files.pythonhosted.org/packages/35/d0/4831af68ce30cc2d03c697bea8450e3225a835ef497d0d70f31b8cdde965/pandas-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:970762605cff1ca0d3f71ed4f3a769ea8f85fc8e6348f6e110b8fea7e6eb5a14", size = 9888127, upload-time = "2026-03-31T06:46:36.253Z" }, + { url = "https://files.pythonhosted.org/packages/61/a9/16ea9346e1fc4a96e2896242d9bc674764fb9049b0044c0132502f7a771e/pandas-3.0.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aff4e6f4d722e0652707d7bcb190c445fe58428500c6d16005b02401764b1b3d", size = 10399577, upload-time = "2026-03-31T06:46:39.224Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a8/3a61a721472959ab0ce865ef05d10b0d6bfe27ce8801c99f33d4fa996e65/pandas-3.0.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef8b27695c3d3dc78403c9a7d5e59a62d5464a7e1123b4e0042763f7104dc74f", size = 10880030, upload-time = "2026-03-31T06:46:42.412Z" }, + { url = "https://files.pythonhosted.org/packages/da/65/7225c0ea4d6ce9cb2160a7fb7f39804871049f016e74782e5dade4d14109/pandas-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f8d68083e49e16b84734eb1a4dcae4259a75c90fb6e2251ab9a00b61120c06ab", size = 11409468, upload-time = "2026-03-31T06:46:45.2Z" }, + { url = "https://files.pythonhosted.org/packages/fa/5b/46e7c76032639f2132359b5cf4c785dd8cf9aea5ea64699eac752f02b9db/pandas-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:32cc41f310ebd4a296d93515fcac312216adfedb1894e879303987b8f1e2b97d", size = 11936381, upload-time = "2026-03-31T06:46:48.293Z" }, + { url = "https://files.pythonhosted.org/packages/7b/8b/721a9cff6fa6a91b162eb51019c6243b82b3226c71bb6c8ef4a9bd65cbc6/pandas-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:a4785e1d6547d8427c5208b748ae2efb64659a21bd82bf440d4262d02bfa02a4", size = 9744993, upload-time = "2026-03-31T06:46:51.488Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/18/7f0bd34ae27b28159aa80f2a6799f47fda34f7fb938a76e20c7b7fe3b200/pandas-3.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:08504503f7101300107ecdc8df73658e4347586db5cfdadabc1592e9d7e7a0fd", size = 9056118, upload-time = "2026-03-31T06:46:54.548Z" }, + { url = "https://files.pythonhosted.org/packages/bf/ca/3e639a1ea6fcd0617ca4e8ca45f62a74de33a56ae6cd552735470b22c8d3/pandas-3.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5918ba197c951dec132b0c5929a00c0bf05d5942f590d3c10a807f6e15a57d3", size = 10321105, upload-time = "2026-03-31T06:46:57.327Z" }, + { url = "https://files.pythonhosted.org/packages/0b/77/dbc82ff2fb0e63c6564356682bf201edff0ba16c98630d21a1fb312a8182/pandas-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d606a041c89c0a474a4702d532ab7e73a14fe35c8d427b972a625c8e46373668", size = 9864088, upload-time = "2026-03-31T06:46:59.935Z" }, + { url = "https://files.pythonhosted.org/packages/5c/2b/341f1b04bbca2e17e13cd3f08c215b70ef2c60c5356ef1e8c6857449edc7/pandas-3.0.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:710246ba0616e86891b58ab95f2495143bb2bc83ab6b06747c74216f583a6ac9", size = 10369066, upload-time = "2026-03-31T06:47:02.792Z" }, + { url = "https://files.pythonhosted.org/packages/12/c5/cbb1ffefb20a93d3f0e1fdcda699fb84976210d411b008f97f48bf6ce27e/pandas-3.0.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5d3cfe227c725b1f3dff4278b43d8c784656a42a9325b63af6b1492a8232209e", size = 10876780, upload-time = "2026-03-31T06:47:06.205Z" }, + { url = "https://files.pythonhosted.org/packages/98/fe/2249ae5e0a69bd0ddf17353d0a5d26611d70970111f5b3600cdc8be883e7/pandas-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c3b723df9087a9a9a840e263ebd9f88b64a12075d1bf2ea401a5a42f254f084d", size = 11375181, upload-time = "2026-03-31T06:47:09.383Z" }, + { url = "https://files.pythonhosted.org/packages/de/64/77a38b09e70b6464883b8d7584ab543e748e42c1b5d337a2ee088e0df741/pandas-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a3096110bf9eac0070b7208465f2740e2d8a670d5cb6530b5bb884eca495fd39", size = 11928899, upload-time = "2026-03-31T06:47:12.686Z" }, + { url = "https://files.pythonhosted.org/packages/5e/52/42855bf626868413f761addd574acc6195880ae247a5346477a4361c3acb/pandas-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:07a10f5c36512eead51bc578eb3354ad17578b22c013d89a796ab5eee90cd991", size = 9746574, upload-time = "2026-03-31T06:47:15.64Z" }, + { url = "https://files.pythonhosted.org/packages/88/39/21304ae06a25e8bf9fc820d69b29b2c495b2ae580d1e143146c309941760/pandas-3.0.2-cp313-cp313-win_arm64.whl", hash = "sha256:5fdbfa05931071aba28b408e59226186b01eb5e92bea2ab78b65863ca3228d84", size = 9047156, upload-time = "2026-03-31T06:47:18.595Z" }, + { url = "https://files.pythonhosted.org/packages/72/20/7defa8b27d4f330a903bb68eea33be07d839c5ea6bdda54174efcec0e1d2/pandas-3.0.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:dbc20dea3b9e27d0e66d74c42b2d0c1bed9c2ffe92adea33633e3bedeb5ac235", size = 10756238, upload-time = "2026-03-31T06:47:22.012Z" }, + { url = "https://files.pythonhosted.org/packages/e9/95/49433c14862c636afc0e9b2db83ff16b3ad92959364e52b2955e44c8e94c/pandas-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b75c347eff42497452116ce05ef461822d97ce5b9ff8df6edacb8076092c855d", size = 10408520, upload-time = "2026-03-31T06:47:25.197Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/f8/462ad2b5881d6b8ec8e5f7ed2ea1893faa02290d13870a1600fe72ad8efc/pandas-3.0.2-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1478075142e83a5571782ad007fb201ed074bdeac7ebcc8890c71442e96adf7", size = 10324154, upload-time = "2026-03-31T06:47:28.097Z" }, + { url = "https://files.pythonhosted.org/packages/0a/65/d1e69b649cbcddda23ad6e4c40ef935340f6f652a006e5cbc3555ac8adb3/pandas-3.0.2-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5880314e69e763d4c8b27937090de570f1fb8d027059a7ada3f7f8e98bdcb677", size = 10714449, upload-time = "2026-03-31T06:47:30.85Z" }, + { url = "https://files.pythonhosted.org/packages/47/a4/85b59bc65b8190ea3689882db6cdf32a5003c0ccd5a586c30fdcc3ffc4fc/pandas-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b5329e26898896f06035241a626d7c335daa479b9bbc82be7c2742d048e41172", size = 11338475, upload-time = "2026-03-31T06:47:34.026Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c4/bc6966c6e38e5d9478b935272d124d80a589511ed1612a5d21d36f664c68/pandas-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:81526c4afd31971f8b62671442a4b2b51e0aa9acc3819c9f0f12a28b6fcf85f1", size = 11786568, upload-time = "2026-03-31T06:47:36.941Z" }, + { url = "https://files.pythonhosted.org/packages/e8/74/09298ca9740beed1d3504e073d67e128aa07e5ca5ca2824b0c674c0b8676/pandas-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:7cadd7e9a44ec13b621aec60f9150e744cfc7a3dd32924a7e2f45edff31823b0", size = 10488652, upload-time = "2026-03-31T06:47:40.612Z" }, + { url = "https://files.pythonhosted.org/packages/bb/40/c6ea527147c73b24fc15c891c3fcffe9c019793119c5742b8784a062c7db/pandas-3.0.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:db0dbfd2a6cdf3770aa60464d50333d8f3d9165b2f2671bcc299b72de5a6677b", size = 10326084, upload-time = "2026-03-31T06:47:43.834Z" }, + { url = "https://files.pythonhosted.org/packages/95/25/bdb9326c3b5455f8d4d3549fce7abcf967259de146fe2cf7a82368141948/pandas-3.0.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0555c5882688a39317179ab4a0ed41d3ebc8812ab14c69364bbee8fb7a3f6288", size = 9914146, upload-time = "2026-03-31T06:47:46.67Z" }, + { url = "https://files.pythonhosted.org/packages/8d/77/3a227ff3337aa376c60d288e1d61c5d097131d0ac71f954d90a8f369e422/pandas-3.0.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:01f31a546acd5574ef77fe199bc90b55527c225c20ccda6601cf6b0fd5ed597c", size = 10444081, upload-time = "2026-03-31T06:47:49.681Z" }, + { url = "https://files.pythonhosted.org/packages/15/88/3cdd54fa279341afa10acf8d2b503556b1375245dccc9315659f795dd2e9/pandas-3.0.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:deeca1b5a931fdf0c2212c8a659ade6d3b1edc21f0914ce71ef24456ca7a6535", size = 10897535, upload-time = "2026-03-31T06:47:53.033Z" }, + { url = "https://files.pythonhosted.org/packages/06/9d/98cc7a7624f7932e40f434299260e2917b090a579d75937cb8a57b9d2de3/pandas-3.0.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0f48afd9bb13300ffb5a3316973324c787054ba6665cda0da3fbd67f451995db", size = 11446992, upload-time = "2026-03-31T06:47:56.193Z" }, + { url = "https://files.pythonhosted.org/packages/9a/cd/19ff605cc3760e80602e6826ddef2824d8e7050ed80f2e11c4b079741dc3/pandas-3.0.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6c4d8458b97a35717b62469a4ea0e85abd5ed8687277f5ccfc67f8a5126f8c53", size = 11968257, upload-time = "2026-03-31T06:47:59.137Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/60/aba6a38de456e7341285102bede27514795c1eaa353bc0e7638b6b785356/pandas-3.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:b35d14bb5d8285d9494fe93815a9e9307c0876e10f1e8e89ac5b88f728ec8dcf", size = 9865893, upload-time = "2026-03-31T06:48:02.038Z" }, + { url = "https://files.pythonhosted.org/packages/08/71/e5ec979dd2e8a093dacb8864598c0ff59a0cee0bbcdc0bfec16a51684d4f/pandas-3.0.2-cp314-cp314-win_arm64.whl", hash = "sha256:63d141b56ef686f7f0d714cfb8de4e320475b86bf4b620aa0b7da89af8cbdbbb", size = 9188644, upload-time = "2026-03-31T06:48:05.045Z" }, + { url = "https://files.pythonhosted.org/packages/f1/6c/7b45d85db19cae1eb524f2418ceaa9d85965dcf7b764ed151386b7c540f0/pandas-3.0.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:140f0cffb1fa2524e874dde5b477d9defe10780d8e9e220d259b2c0874c89d9d", size = 10776246, upload-time = "2026-03-31T06:48:07.789Z" }, + { url = "https://files.pythonhosted.org/packages/a8/3e/7b00648b086c106e81766f25322b48aa8dfa95b55e621dbdf2fdd413a117/pandas-3.0.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ae37e833ff4fed0ba352f6bdd8b73ba3ab3256a85e54edfd1ab51ae40cca0af8", size = 10424801, upload-time = "2026-03-31T06:48:10.897Z" }, + { url = "https://files.pythonhosted.org/packages/da/6e/558dd09a71b53b4008e7fc8a98ec6d447e9bfb63cdaeea10e5eb9b2dabe8/pandas-3.0.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4d888a5c678a419a5bb41a2a93818e8ed9fd3172246555c0b37b7cc27027effd", size = 10345643, upload-time = "2026-03-31T06:48:13.7Z" }, + { url = "https://files.pythonhosted.org/packages/be/e3/921c93b4d9a280409451dc8d07b062b503bbec0531d2627e73a756e99a82/pandas-3.0.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b444dc64c079e84df91baa8bf613d58405645461cabca929d9178f2cd392398d", size = 10743641, upload-time = "2026-03-31T06:48:16.659Z" }, + { url = "https://files.pythonhosted.org/packages/56/ca/fd17286f24fa3b4d067965d8d5d7e14fe557dd4f979a0b068ac0deaf8228/pandas-3.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4544c7a54920de8eeacaa1466a6b7268ecfbc9bc64ab4dbb89c6bbe94d5e0660", size = 11361993, upload-time = "2026-03-31T06:48:19.475Z" }, + { url = "https://files.pythonhosted.org/packages/e4/a5/2f6ed612056819de445a433ca1f2821ac3dab7f150d569a59e9cc105de1d/pandas-3.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:734be7551687c00fbd760dc0522ed974f82ad230d4a10f54bf51b80d44a08702", size = 11815274, upload-time = "2026-03-31T06:48:22.695Z" }, + { url = "https://files.pythonhosted.org/packages/00/2f/b622683e99ec3ce00b0854bac9e80868592c5b051733f2cf3a868e5fea26/pandas-3.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:57a07209bebcbcf768d2d13c9b78b852f9a15978dac41b9e6421a81ad4cdd276", size = 10888530, upload-time = "2026-03-31T06:48:25.806Z" }, + { url = "https://files.pythonhosted.org/packages/cb/2b/f8434233fab2bd66a02ec014febe4e5adced20e2693e0e90a07d118ed30e/pandas-3.0.2-cp314-cp314t-win_arm64.whl", hash = "sha256:5371b72c2d4d415d08765f32d689217a43227484e81b2305b52076e328f6f482", size = 9455341, upload-time = "2026-03-31T06:48:28.418Z" }, +] + +[[package]] +name = "pathspec" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/17/9c3094b822982b9f1ea666d8580ce59000f61f87c1663556fb72031ad9ec/pathspec-1.1.0.tar.gz", hash = "sha256:f5d7c555da02fd8dde3e4a2354b6aba817a89112fa8f333f7917a2a4834dd080", size = 133918, upload-time = "2026-04-23T01:46:22.298Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fa/c9/8eed0486f074e9f1ca7f8ce5ad663e65f12fdab344028d658fa1b03d35e0/pathspec-1.1.0-py3-none-any.whl", hash = "sha256:574b128f7456bd899045ccd142dd446af7e6cfd0072d63ad73fbc55fbb4aaa42", size = 56264, upload-time = "2026-04-23T01:46:20.606Z" }, ] [[package]] @@ -2693,18 +2968,52 @@ wheels = [ [[package]] name = "primp" -version = "0.15.0" +version = "1.2.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/56/0b/a87556189da4de1fc6360ca1aa05e8335509633f836cdd06dd17f0743300/primp-0.15.0.tar.gz", hash = "sha256:1af8ea4b15f57571ff7fc5e282a82c5eb69bc695e19b8ddeeda324397965b30a", size = 113022, upload-time = "2025-04-17T11:41:05.315Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/ca/383bdf0df3dc87b60bf73c55da526ac743d42c5155a84a9014775b895e96/primp-1.2.3.tar.gz", hash = "sha256:a531b01f57cb59e3e7a3a2b526bb151b61dc7b2e15d2f6961615a553632e2889", size = 1342866, upload-time = "2026-04-20T08:34:09.018Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f5/5a/146ac964b99ea7657ad67eb66f770be6577dfe9200cb28f9a95baffd6c3f/primp-0.15.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1b281f4ca41a0c6612d4c6e68b96e28acfe786d226a427cd944baa8d7acd644f", size = 3178914, upload-time = "2025-04-17T11:40:59.558Z" }, - { url = "https://files.pythonhosted.org/packages/bc/8a/cc2321e32db3ce64d6e32950d5bcbea01861db97bfb20b5394affc45b387/primp-0.15.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:489cbab55cd793ceb8f90bb7423c6ea64ebb53208ffcf7a044138e3c66d77299", size = 2955079, upload-time = "2025-04-17T11:40:57.398Z" }, - { url = "https://files.pythonhosted.org/packages/c3/7b/cbd5d999a07ff2a21465975d4eb477ae6f69765e8fe8c9087dab250180d8/primp-0.15.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b45c23f94016215f62d2334552224236217aaeb716871ce0e4dcfa08eb161", size = 3281018, upload-time = "2025-04-17T11:40:55.308Z" }, - { url = "https://files.pythonhosted.org/packages/1b/6e/a6221c612e61303aec2bcac3f0a02e8b67aee8c0db7bdc174aeb8010f975/primp-0.15.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e985a9cba2e3f96a323722e5440aa9eccaac3178e74b884778e926b5249df080", size = 3255229, upload-time = "2025-04-17T11:40:47.811Z" }, - { url = "https://files.pythonhosted.org/packages/3b/54/bfeef5aca613dc660a69d0760a26c6b8747d8fdb5a7f20cb2cee53c9862f/primp-0.15.0-cp38-abi3-manylinux_2_34_armv7l.whl", hash = "sha256:6b84a6ffa083e34668ff0037221d399c24d939b5629cd38223af860de9e17a83", size = 3014522, upload-time = "2025-04-17T11:40:50.191Z" }, - { url = "https://files.pythonhosted.org/packages/ac/96/84078e09f16a1dad208f2fe0f8a81be2cf36e024675b0f9eec0c2f6e2182/primp-0.15.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:592f6079646bdf5abbbfc3b0a28dac8de943f8907a250ce09398cda5eaebd260", size = 3418567, upload-time = "2025-04-17T11:41:01.595Z" }, - { url = "https://files.pythonhosted.org/packages/6c/80/8a7a9587d3eb85be3d0b64319f2f690c90eb7953e3f73a9ddd9e46c8dc42/primp-0.15.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5a728e5a05f37db6189eb413d22c78bd143fa59dd6a8a26dacd43332b3971fe8", size = 3606279, upload-time = "2025-04-17T11:41:03.61Z" }, - { url = "https://files.pythonhosted.org/packages/0c/dd/f0183ed0145e58cf9d286c1b2c14f63ccee987a4ff79ac85acc31b5d86bd/primp-0.15.0-cp38-abi3-win_amd64.whl", hash = "sha256:aeb6bd20b06dfc92cfe4436939c18de88a58c640752cf7f30d9e4ae893cdec32", size = 3149967, upload-time = "2025-04-17T11:41:07.067Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/b1/05bb7e00bd17a439aae7ed49b0c0834508e3ce50624dfa43c6dcb8b71bd0/primp-1.2.3-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:e96d6ab40fba41039947dad0fcc42b0b56b67180883e526715720bb2d90f3bfc", size = 4360352, upload-time = "2026-04-20T08:33:52.785Z" }, + { url = "https://files.pythonhosted.org/packages/7e/db/8bb1e4b6bb715f606b53793831e7910aebeb40169e439138e9124686820a/primp-1.2.3-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:42f28679916ce080e643e7464786abeb659c8062c0f74bb411918c7f07e5b806", size = 4035926, upload-time = "2026-04-20T08:34:06.348Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/52b5e6d840be4d5a8f689d9ba82dd616c67e29e3e1d13baf6a4c9be3f4b3/primp-1.2.3-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:643d47cf24962331ad2b049d6bb4329dce6b18a0914490dbf09541cb38596d39", size = 4309649, upload-time = "2026-04-20T08:34:01.369Z" }, + { url = "https://files.pythonhosted.org/packages/40/85/d98e20104429bb8393939396c2317ec6163a83d856b1fe555bf5f021e97a/primp-1.2.3-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:898a12b44af9aed20c10fd4b497314731e9f6dcab20f4aa64cf118f79df17fa0", size = 3910331, upload-time = "2026-04-20T08:33:37.294Z" }, + { url = "https://files.pythonhosted.org/packages/8e/c7/53f11e2e2fe758830f6f208cef5bd3d0ecc6f538c0a2ca8e15a1fa502bd1/primp-1.2.3-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4bdf2b164c2908f7bdc845215dd21ebded1f2f43e9e9ae2d9a961ea56e5cb87", size = 4152054, upload-time = "2026-04-20T08:33:56.193Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5a/48e8f985daeeb58c7f1789bb6748d9aba250ea3541d787bcbcb1243abfd3/primp-1.2.3-cp310-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:036884d4c6c866c93a88e591e49f67ed160f6a9d98c779fc652cce63de62996a", size = 4443286, upload-time = "2026-04-20T08:34:07.782Z" }, + { url = "https://files.pythonhosted.org/packages/5e/81/42b32337bd8c7b7b8184a1fcd79fd51d4773427553a8d2036eb0a93a137d/primp-1.2.3-cp310-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bfc695c20f5c6e345abc3262bef3c246a571986db5cea73bdc41db6b166066c8", size = 4323757, upload-time = "2026-04-20T08:33:43.807Z" }, + { url = "https://files.pythonhosted.org/packages/37/43/9ee78c283cbca7fcd635c2a9bb5633aa6568b1925958017e1a979fffe56c/primp-1.2.3-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13255b0826c60681478c787fbe29cfc773caf6242390fee047dac0f23f6e8c11", size = 4525772, upload-time = "2026-04-20T08:33:45.427Z" }, + { url = "https://files.pythonhosted.org/packages/c5/41/f10164485d84145a1ea18c7966d1ee098d1790b2088b7f122570463b7d5c/primp-1.2.3-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9bc40f94c8e58444befaf7e78b8aadd96e94e32789dadbe4a03785db772aa4dc", size = 4471705, upload-time = "2026-04-20T08:33:54.317Z" }, + { url = "https://files.pythonhosted.org/packages/18/a2/5328d66dd8f63bacfc9ef0e1e5fde66b2bb43103108fe8f01dcb2d2f0e50/primp-1.2.3-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:2e0e8e245113ab3c9fd4b36014b3a04869cf08f72e8e7f36c4f5ef46d26da090", size = 4148309, upload-time = "2026-04-20T08:33:31.609Z" }, + { url = "https://files.pythonhosted.org/packages/c9/86/4d000d3ee341e5f1e73d81fb2c84e985f23b343a1aa4234c40987ec6eae7/primp-1.2.3-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:3bb50934b5e209e7da4876d3419ebc23d1425fe6f089c0cd95382d21bd8a39bd", size = 4276881, upload-time = "2026-04-20T08:33:34.414Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/f5/50acc3e349d22044abf4ed7c2abaaba11a01b9ccb6a7d66a3fbc2275c590/primp-1.2.3-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b293f13078afee9d41b74c77d05a5eaeb57dfab5110648dd24bc3fb3c750a05c", size = 4775229, upload-time = "2026-04-20T08:34:10.241Z" }, + { url = "https://files.pythonhosted.org/packages/3a/d6/2c482973211666fdf2c4365babf343a88ea30f753ef650121cebd4ad7ece/primp-1.2.3-cp310-abi3-win32.whl", hash = "sha256:c600dd83e9c978bf494aab072cd5bbfdd59b131f3afc353850c53373a86992e5", size = 3504032, upload-time = "2026-04-20T08:33:28.344Z" }, + { url = "https://files.pythonhosted.org/packages/70/89/3d2032a8f5e986fcc03bc86ac98705ce2bd35ed0e182d4949c159214da6a/primp-1.2.3-cp310-abi3-win_amd64.whl", hash = "sha256:3cbbe52a6eb51a4831d3dd35055f13b28ff5b9be2487c14ffe66922bf8028b49", size = 3872596, upload-time = "2026-04-20T08:33:46.927Z" }, + { url = "https://files.pythonhosted.org/packages/79/00/f726d54ff00213641069a66ee2fadf17f01f77a2f1ba92de229830056419/primp-1.2.3-cp310-abi3-win_arm64.whl", hash = "sha256:03c668481b2cf34552880f4b6ebabfa913fdaeb2921ce982e42c428f451b630c", size = 3875239, upload-time = "2026-04-20T08:33:32.981Z" }, + { url = "https://files.pythonhosted.org/packages/5c/e6/1fe1bcaf7566d7e6c9b39748f0d6ded857c80cf3252acf6aa3c6b7b8dbb0/primp-1.2.3-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:0148fd5c0502bb88a26217b606ebb254de44a666dac52657ff97f727577bc7ce", size = 4348055, upload-time = "2026-04-20T08:33:48.193Z" }, + { url = "https://files.pythonhosted.org/packages/c0/46/cae79466969a31d885409ce716eb5a4af66f66d1121e63ee3ddd7d666b9e/primp-1.2.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:13bfc39172a083cc5520d9045a99988d7f8502458be139d6b1926de4d57e30e9", size = 4034114, upload-time = "2026-04-20T08:33:26.888Z" }, + { url = "https://files.pythonhosted.org/packages/f5/c3/e8b24ff42ef6bf71f8b4277fd6d83a139d9b9ad14b0ffb4c76a8e9225a15/primp-1.2.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:283e751671e78cbac9f1ff72e89225d74bcd9ea08886910160f485cd78a55f2b", size = 4303794, upload-time = "2026-04-20T08:34:04.947Z" }, + { url = "https://files.pythonhosted.org/packages/72/b4/9c59197fee68abdc53683b65b6b0e5ea6e0f098cc2527af29edd05b22ed4/primp-1.2.3-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec5d94244f24c61b350791112c1687c97d144f400a7ea5d8476dbd2fad6de9af", size = 3908577, upload-time = "2026-04-20T08:33:59.567Z" }, + { url = "https://files.pythonhosted.org/packages/71/b2/25c7cfcfe33f2fa82ab8cc72f4082fc12c8f803ceb2624c787fa72c73e12/primp-1.2.3-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48d459dda86bfddce3a14b514694e0c8028f20cc461dd52e105aba400c622513", size = 4153501, upload-time = "2026-04-20T08:34:03.22Z" }, + { url = "https://files.pythonhosted.org/packages/04/9b/83e61b32b9049478f63c1956ff3244619691c3edf6e96b19254047756b36/primp-1.2.3-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0d8441f07c948a54ff21bfc4094cd2967b5fb6f8a9ef0c9562a1eb35da0127e", size = 4440289, upload-time = "2026-04-20T08:33:24.734Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3f/48c351f601f204c52d5a477d7a19efcd8b7deb6478c57195fbe8bf9c3632/primp-1.2.3-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:728b6f63552f1bc729e0490ee69a3a9fbd39698c1578b0e6313f3779de469a8c", size = 4306848, upload-time = "2026-04-20T08:33:40.482Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/9a/683b2fbb8c2df3b3d2b351e86eec0a873479b95bb804e5d882939057366b/primp-1.2.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9952509dcd8aa8750f4cdfc86b24cdcec96f9342a6525003bc3446ba466d5512", size = 4521679, upload-time = "2026-04-20T08:33:38.699Z" }, + { url = "https://files.pythonhosted.org/packages/80/eb/bbbbc79cf4132f2beeb0a0f99f41837dd66c158d0439f59df75a2d592d0a/primp-1.2.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e74a6aa83aa65d02665ad5d0e53d7877ea8abce6f3d50ed92495f9c4c9d10e2e", size = 4469041, upload-time = "2026-04-20T08:33:51.161Z" }, + { url = "https://files.pythonhosted.org/packages/f7/5c/208fe1c6e9d3a28c6eae530378ac9c60fe8ae389f68a4707ef7986d5b133/primp-1.2.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:1dfc4dd5bf7e4f9a45f9c7ae1256f957cc7ca8d5344f82d335129a13e1d7ffea", size = 4144925, upload-time = "2026-04-20T08:33:35.94Z" }, + { url = "https://files.pythonhosted.org/packages/da/06/ce175b54edecf22b750da67bc095bea53f2d87d191c4461b30122c7ddd1e/primp-1.2.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:97c37df6fc7eb989f324b326e0547b23757a82f9d8b334075d4b0bd29e310723", size = 4276929, upload-time = "2026-04-20T08:33:49.521Z" }, + { url = "https://files.pythonhosted.org/packages/2e/12/a913ab53ad61fe51d44ef0f8fee6b63a897d2b9c8a4b43299d7f5decc003/primp-1.2.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ead9c8b660fa0ebb193317b85d61dd1bd840a3e97e882b33b896dc9e37ed6d63", size = 4772684, upload-time = "2026-04-20T08:33:42.456Z" }, + { url = "https://files.pythonhosted.org/packages/7c/2f/2c3aafb2814703979bf5a216575f4478bcf6070bc6ec143056cf3b56c134/primp-1.2.3-cp314-cp314t-win32.whl", hash = "sha256:cb52db4d54105097773c0df4e2ef0ce7b42461d9d4d3ec28a0622ca2d22945bb", size = 3499126, upload-time = "2026-04-20T08:33:29.953Z" }, + { url = "https://files.pythonhosted.org/packages/54/f0/bad9c739a35f4cf69e2f39c01f664c9a5969e753c4cfcdc33e113cc984af/primp-1.2.3-cp314-cp314t-win_amd64.whl", hash = "sha256:d85a44585df27038e18298f7e35335c4961926e7401edd1a1bf4f7f967b3f107", size = 3870129, upload-time = "2026-04-20T08:34:11.666Z" }, + { url = "https://files.pythonhosted.org/packages/67/78/882563dc554c5f710b4e47c58978586c2528703435800a7a84e81b339d90/primp-1.2.3-cp314-cp314t-win_arm64.whl", hash = "sha256:6513dcb55e6b511c1ae383a3a0e887929b09d8d6b2c70e2b188b425c51471a02", size = 3874087, upload-time = "2026-04-20T08:33:58.109Z" }, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.52" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, ] [[package]] @@ -2793,17 +3102,87 @@ wheels = [ [[package]] name = "protobuf" -version = "6.33.5" +version = "6.33.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", 
hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/70/e908e9c5e52ef7c3a6c7902c9dfbb34c7e29c25d2f81ade3856445fd5c94/protobuf-6.33.6.tar.gz", hash = "sha256:a6768d25248312c297558af96a9f9c929e8c4cee0659cb07e780731095f38135", size = 444531, upload-time = "2026-03-18T19:05:00.988Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" }, - { url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" }, - { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" }, - { url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" }, - { url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" }, - { url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" }, - { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" }, + { url = "https://files.pythonhosted.org/packages/fc/9f/2f509339e89cfa6f6a4c4ff50438db9ca488dec341f7e454adad60150b00/protobuf-6.33.6-cp310-abi3-win32.whl", hash = "sha256:7d29d9b65f8afef196f8334e80d6bc1d5d4adedb449971fefd3723824e6e77d3", size = 425739, upload-time = "2026-03-18T19:04:48.373Z" }, + { url = "https://files.pythonhosted.org/packages/76/5d/683efcd4798e0030c1bab27374fd13a89f7c2515fb1f3123efdfaa5eab57/protobuf-6.33.6-cp310-abi3-win_amd64.whl", hash = "sha256:0cd27b587afca21b7cfa59a74dcbd48a50f0a6400cfb59391340ad729d91d326", size = 437089, upload-time = "2026-03-18T19:04:50.381Z" }, + { url = "https://files.pythonhosted.org/packages/5c/01/a3c3ed5cd186f39e7880f8303cc51385a198a81469d53d0fdecf1f64d929/protobuf-6.33.6-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:9720e6961b251bde64edfdab7d500725a2af5280f3f4c87e57c0208376aa8c3a", size = 427737, upload-time = "2026-03-18T19:04:51.866Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/90/b3c01fdec7d2f627b3a6884243ba328c1217ed2d978def5c12dc50d328a3/protobuf-6.33.6-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:e2afbae9b8e1825e3529f88d514754e094278bb95eadc0e199751cdd9a2e82a2", size = 324610, upload-time = "2026-03-18T19:04:53.096Z" }, + { url = "https://files.pythonhosted.org/packages/9b/ca/25afc144934014700c52e05103c2421997482d561f3101ff352e1292fb81/protobuf-6.33.6-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:c96c37eec15086b79762ed265d59ab204dabc53056e3443e702d2681f4b39ce3", size = 339381, upload-time = "2026-03-18T19:04:54.616Z" }, + { url = "https://files.pythonhosted.org/packages/16/92/d1e32e3e0d894fe00b15ce28ad4944ab692713f2e7f0a99787405e43533a/protobuf-6.33.6-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:e9db7e292e0ab79dd108d7f1a94fe31601ce1ee3f7b79e0692043423020b0593", size = 323436, upload-time = "2026-03-18T19:04:55.768Z" }, + { url = "https://files.pythonhosted.org/packages/c4/72/02445137af02769918a93807b2b7890047c32bfb9f90371cbc12688819eb/protobuf-6.33.6-py3-none-any.whl", hash = "sha256:77179e006c476e69bf8e8ce866640091ec42e1beb80b213c3900006ecfba6901", size = 170656, upload-time = "2026-03-18T19:04:59.826Z" }, +] + +[[package]] +name = "psycopg" +version = "3.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/b6/379d0a960f8f435ec78720462fd94c4863e7a31237cf81bf76d0af5883bf/psycopg-3.3.3.tar.gz", hash = "sha256:5e9a47458b3c1583326513b2556a2a9473a1001a56c9efe9e587245b43148dd9", size = 165624, upload-time = "2026-02-18T16:52:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/5b/181e2e3becb7672b502f0ed7f16ed7352aca7c109cfb94cf3878a9186db9/psycopg-3.3.3-py3-none-any.whl", hash = "sha256:f96525a72bcfade6584ab17e89de415ff360748c766f0106959144dcbb38c698", size = 212768, upload-time = "2026-02-18T16:46:27.365Z" }, +] + +[package.optional-dependencies] +binary = [ + { name = "psycopg-binary", marker = "implementation_name != 'pypy'" }, +] + +[[package]] +name = "psycopg-binary" +version = "3.3.3" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/15/021be5c0cbc5b7c1ab46e91cc3434eb42569f79a0592e67b8d25e66d844d/psycopg_binary-3.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6698dbab5bcef8fdb570fc9d35fd9ac52041771bfcfe6fd0fc5f5c4e36f1e99d", size = 4591170, upload-time = "2026-02-18T16:48:55.594Z" }, + { url = "https://files.pythonhosted.org/packages/f1/54/a60211c346c9a2f8c6b272b5f2bbe21f6e11800ce7f61e99ba75cf8b63e1/psycopg_binary-3.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:329ff393441e75f10b673ae99ab45276887993d49e65f141da20d915c05aafd8", size = 4670009, upload-time = "2026-02-18T16:49:03.608Z" }, + { url = "https://files.pythonhosted.org/packages/c1/53/ac7c18671347c553362aadbf65f92786eef9540676ca24114cc02f5be405/psycopg_binary-3.3.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:eb072949b8ebf4082ae24289a2b0fd724da9adc8f22743409d6fd718ddb379df", size = 5469735, upload-time = "2026-02-18T16:49:10.128Z" }, + { url = "https://files.pythonhosted.org/packages/7f/c3/4f4e040902b82a344eff1c736cde2f2720f127fe939c7e7565706f96dd44/psycopg_binary-3.3.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:263a24f39f26e19ed7fc982d7859a36f17841b05bebad3eb47bb9cd2dd785351", size = 5152919, upload-time = "2026-02-18T16:49:16.335Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e7/d929679c6a5c212bcf738806c7c89f5b3d0919f2e1685a0e08d6ff877945/psycopg_binary-3.3.3-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5152d50798c2fa5bd9b68ec68eb68a1b71b95126c1d70adaa1a08cd5eefdc23d", size = 6738785, upload-time = "2026-02-18T16:49:22.687Z" }, + { url = "https://files.pythonhosted.org/packages/69/b0/09703aeb69a9443d232d7b5318d58742e8ca51ff79f90ffe6b88f1db45e7/psycopg_binary-3.3.3-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9d6a1e56dd267848edb824dbeb08cf5bac649e02ee0b03ba883ba3f4f0bd54f2", size = 4979008, upload-time = "2026-02-18T16:49:27.313Z" }, + { url = "https://files.pythonhosted.org/packages/cc/a6/e662558b793c6e13a7473b970fee327d635270e41eded3090ef14045a6a5/psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73eaaf4bb04709f545606c1db2f65f4000e8a04cdbf3e00d165a23004692093e", size = 4508255, upload-time = "2026-02-18T16:49:31.575Z" }, + { url = "https://files.pythonhosted.org/packages/5f/7f/0f8b2e1d5e0093921b6f324a948a5c740c1447fbb45e97acaf50241d0f39/psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:162e5675efb4704192411eaf8e00d07f7960b679cd3306e7efb120bb8d9456cc", size = 4189166, upload-time = "2026-02-18T16:49:35.801Z" }, + { url = "https://files.pythonhosted.org/packages/92/ec/ce2e91c33bc8d10b00c87e2f6b0fb570641a6a60042d6a9ae35658a3a797/psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:fab6b5e37715885c69f5d091f6ff229be71e235f272ebaa35158d5a46fd548a0", size = 3924544, upload-time = "2026-02-18T16:49:41.129Z" }, + { url = "https://files.pythonhosted.org/packages/c5/2f/7718141485f73a924205af60041c392938852aa447a94c8cbd222ff389a1/psycopg_binary-3.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a4aab31bd6d1057f287c96c0effca3a25584eb9cc702f282ecb96ded7814e830", size = 4235297, upload-time = "2026-02-18T16:49:46.726Z" }, + { url = "https://files.pythonhosted.org/packages/57/f9/1add717e2643a003bbde31b1b220172e64fbc0cb09f06429820c9173f7fc/psycopg_binary-3.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:59aa31fe11a0e1d1bcc2ce37ed35fe2ac84cd65bb9036d049b1a1c39064d0f14", size = 3547659, upload-time = "2026-02-18T16:49:52.999Z" }, + { url = "https://files.pythonhosted.org/packages/03/0a/cac9fdf1df16a269ba0e5f0f06cac61f826c94cadb39df028cdfe19d3a33/psycopg_binary-3.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05f32239aec25c5fb15f7948cffdc2dc0dac098e48b80a140e4ba32b572a2e7d", size = 4590414, upload-time = "2026-02-18T16:50:01.441Z" }, + { url = "https://files.pythonhosted.org/packages/9c/c0/d8f8508fbf440edbc0099b1abff33003cd80c9e66eb3a1e78834e3fb4fb9/psycopg_binary-3.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c84f9d214f2d1de2fafebc17fa68ac3f6561a59e291553dfc45ad299f4898c1", size = 4669021, upload-time = "2026-02-18T16:50:08.803Z" }, + { url = "https://files.pythonhosted.org/packages/04/05/097016b77e343b4568feddf12c72171fc513acef9a4214d21b9478569068/psycopg_binary-3.3.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e77957d2ba17cada11be09a5066d93026cdb61ada7c8893101d7fe1c6e1f3925", size = 5467453, upload-time = "2026-02-18T16:50:14.985Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/23/73244e5feb55b5ca109cede6e97f32ef45189f0fdac4c80d75c99862729d/psycopg_binary-3.3.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:42961609ac07c232a427da7c87a468d3c82fee6762c220f38e37cfdacb2b178d", size = 5151135, upload-time = "2026-02-18T16:50:24.82Z" }, + { url = "https://files.pythonhosted.org/packages/11/49/5309473b9803b207682095201d8708bbc7842ddf3f192488a69204e36455/psycopg_binary-3.3.3-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae07a3114313dd91fce686cab2f4c44af094398519af0e0f854bc707e1aeedf1", size = 6737315, upload-time = "2026-02-18T16:50:35.106Z" }, + { url = "https://files.pythonhosted.org/packages/d4/5d/03abe74ef34d460b33c4d9662bf6ec1dd38888324323c1a1752133c10377/psycopg_binary-3.3.3-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d257c58d7b36a621dcce1d01476ad8b60f12d80eb1406aee4cf796f88b2ae482", size = 4979783, upload-time = "2026-02-18T16:50:42.067Z" }, + { url = "https://files.pythonhosted.org/packages/f0/6c/3fbf8e604e15f2f3752900434046c00c90bb8764305a1b81112bff30ba24/psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:07c7211f9327d522c9c47560cae00a4ecf6687f4e02d779d035dd3177b41cb12", size = 4509023, upload-time = "2026-02-18T16:50:50.116Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6b/1a06b43b7c7af756c80b67eac8bfaa51d77e68635a8a8d246e4f0bb7604a/psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:8e7e9eca9b363dbedeceeadd8be97149d2499081f3c52d141d7cd1f395a91f83", size = 4185874, upload-time = "2026-02-18T16:50:55.97Z" }, + { url = "https://files.pythonhosted.org/packages/2b/d3/bf49e3dcaadba510170c8d111e5e69e5ae3f981c1554c5bb71c75ce354bb/psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:cb85b1d5702877c16f28d7b92ba030c1f49ebcc9b87d03d8c10bf45a2f1c7508", size = 3925668, upload-time = "2026-02-18T16:51:03.299Z" }, + { url = "https://files.pythonhosted.org/packages/f8/92/0aac830ed6a944fe334404e1687a074e4215630725753f0e3e9a9a595b62/psycopg_binary-3.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4d4606c84d04b80f9138d72f1e28c6c02dc5ae0c7b8f3f8aaf89c681ce1cd1b1", size = 4234973, upload-time = "2026-02-18T16:51:09.097Z" }, + { url = "https://files.pythonhosted.org/packages/2e/96/102244653ee5a143ece5afe33f00f52fe64e389dfce8dbc87580c6d70d3d/psycopg_binary-3.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:74eae563166ebf74e8d950ff359be037b85723d99ca83f57d9b244a871d6c13b", size = 3551342, upload-time = "2026-02-18T16:51:13.892Z" }, + { url = "https://files.pythonhosted.org/packages/a2/71/7a57e5b12275fe7e7d84d54113f0226080423a869118419c9106c083a21c/psycopg_binary-3.3.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:497852c5eaf1f0c2d88ab74a64a8097c099deac0c71de1cbcf18659a8a04a4b2", size = 4607368, upload-time = "2026-02-18T16:51:19.295Z" }, + { url = "https://files.pythonhosted.org/packages/c7/04/cb834f120f2b2c10d4003515ef9ca9d688115b9431735e3936ae48549af8/psycopg_binary-3.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:258d1ea53464d29768bf25930f43291949f4c7becc706f6e220c515a63a24edd", size = 4687047, upload-time = "2026-02-18T16:51:23.84Z" }, + { url = "https://files.pythonhosted.org/packages/40/e9/47a69692d3da9704468041aa5ed3ad6fc7f6bb1a5ae788d261a26bbca6c7/psycopg_binary-3.3.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:111c59897a452196116db12e7f608da472fbff000693a21040e35fc978b23430", size = 5487096, 
upload-time = "2026-02-18T16:51:29.645Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b6/0e0dd6a2f802864a4ae3dbadf4ec620f05e3904c7842b326aafc43e5f464/psycopg_binary-3.3.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:17bb6600e2455993946385249a3c3d0af52cd70c1c1cdbf712e9d696d0b0bf1b", size = 5168720, upload-time = "2026-02-18T16:51:36.499Z" }, + { url = "https://files.pythonhosted.org/packages/6f/0d/977af38ac19a6b55d22dff508bd743fd7c1901e1b73657e7937c7cccb0a3/psycopg_binary-3.3.3-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:642050398583d61c9856210568eb09a8e4f2fe8224bf3be21b67a370e677eead", size = 6762076, upload-time = "2026-02-18T16:51:43.167Z" }, + { url = "https://files.pythonhosted.org/packages/34/40/912a39d48322cf86895c0eaf2d5b95cb899402443faefd4b09abbba6b6e1/psycopg_binary-3.3.3-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:533efe6dc3a7cba5e2a84e38970786bb966306863e45f3db152007e9f48638a6", size = 4997623, upload-time = "2026-02-18T16:51:47.707Z" }, + { url = "https://files.pythonhosted.org/packages/98/0c/c14d0e259c65dc7be854d926993f151077887391d5a081118907a9d89603/psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5958dbf28b77ce2033482f6cb9ef04d43f5d8f4b7636e6963d5626f000efb23e", size = 4532096, upload-time = "2026-02-18T16:51:51.421Z" }, + { url = "https://files.pythonhosted.org/packages/39/21/8b7c50a194cfca6ea0fd4d1f276158307785775426e90700ab2eba5cd623/psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:a6af77b6626ce92b5817bf294b4d45ec1a6161dba80fc2d82cdffdd6814fd023", size = 4208884, upload-time = "2026-02-18T16:51:57.336Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2c/a4981bf42cf30ebba0424971d7ce70a222ae9b82594c42fc3f2105d7b525/psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:47f06fcbe8542b4d96d7392c476a74ada521c5aebdb41c3c0155f6595fc14c8d", size = 3944542, upload-time = "2026-02-18T16:52:04.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/e9/b7c29b56aa0b85a4e0c4d89db691c1ceef08f46a356369144430c155a2f5/psycopg_binary-3.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e7800e6c6b5dc4b0ca7cc7370f770f53ac83886b76afda0848065a674231e856", size = 4254339, upload-time = "2026-02-18T16:52:10.444Z" }, + { url = "https://files.pythonhosted.org/packages/98/5a/291d89f44d3820fffb7a04ebc8f3ef5dda4f542f44a5daea0c55a84abf45/psycopg_binary-3.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:165f22ab5a9513a3d7425ffb7fcc7955ed8ccaeef6d37e369d6cc1dff1582383", size = 3652796, upload-time = "2026-02-18T16:52:14.02Z" }, +] + +[[package]] +name = "psycopg-pool" +version = "3.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/56/9a/9470d013d0d50af0da9c4251614aeb3c1823635cab3edc211e3839db0bcf/psycopg_pool-3.3.0.tar.gz", hash = "sha256:fa115eb2860bd88fce1717d75611f41490dec6135efb619611142b24da3f6db5", size = 31606, upload-time = "2025-12-01T11:34:33.11Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/c3/26b8a0908a9db249de3b4169692e1c7c19048a9bc41a4d3209cee7dbb758/psycopg_pool-3.3.0-py3-none-any.whl", hash = "sha256:2e44329155c410b5e8666372db44276a8b1ebd8c90f1c3026ebba40d4bc81063", size = 39995, upload-time = "2025-12-01T11:34:29.761Z" }, ] [[package]] @@ -2829,11 +3208,11 @@ wheels = [ [[package]] name = "pycparser" -version = "2.23" +version = "3.0" source 
= { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, ] [[package]] @@ -2868,7 +3247,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.12.5" +version = "2.13.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -2876,94 +3255,98 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d9/e4/40d09941a2cebcb20609b86a559817d5b9291c49dd6f8c87e5feffbe703a/pydantic-2.13.3.tar.gz", hash = "sha256:af09e9d1d09f4e7fe37145c1f577e1d61ceb9a41924bf0094a36506285d0a84d", size = 844068, upload-time = "2026-04-20T14:46:43.632Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, + { url = "https://files.pythonhosted.org/packages/f3/0a/fd7d723f8f8153418fb40cf9c940e82004fce7e987026b08a68a36dd3fe7/pydantic-2.13.3-py3-none-any.whl", hash = "sha256:6db14ac8dfc9a1e57f87ea2c0de670c251240f43cb0c30a5130e9720dc612927", size = 471981, upload-time = "2026-04-20T14:46:41.402Z" }, ] [[package]] name = "pydantic-core" -version = "2.41.5" +version = "2.46.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/ef/f7abb56c49382a246fd2ce9c799691e3c3e7175ec74b14d99e798bcddb1a/pydantic_core-2.46.3.tar.gz", hash = "sha256:41c178f65b8c29807239d47e6050262eb6bf84eb695e41101e62e38df4a5bc2c", size = 471412, upload-time = "2026-04-20T14:40:56.672Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, - { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, - { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, - { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, - { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, - { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, - { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, - { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, - { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, - { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, - { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, - { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, - { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, - { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, - { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, - { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, - { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, - { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, - { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, - { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, - { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, - { url = 
"https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, - { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, - { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, - { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, - { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, - { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, - { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, - { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, - { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, - { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, - { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, - { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, - { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, - { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, - { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, - { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, - { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, - { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, - { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, - { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, - { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, - 
{ url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, - { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, - { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, - { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, - { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, - { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, - { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, - { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, - { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, - { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, - { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = 
"sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, - { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, - { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, - { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, - { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, - { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/4b/cb/5b47425556ecc1f3fe18ed2a0083188aa46e1dd812b06e406475b3a5d536/pydantic_core-2.46.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b11b59b3eee90a80a36701ddb4576d9ae31f93f05cb9e277ceaa09e6bf074a67", size = 2101946, upload-time = "2026-04-20T14:40:52.581Z" }, + { url = "https://files.pythonhosted.org/packages/a1/4f/2fb62c2267cae99b815bbf4a7b9283812c88ca3153ef29f7707200f1d4e5/pydantic_core-2.46.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af8653713055ea18a3abc1537fe2ebc42f5b0bbb768d1eb79fd74eb47c0ac089", size = 1951612, upload-time = "2026-04-20T14:42:42.996Z" }, + { url = "https://files.pythonhosted.org/packages/50/6e/b7348fd30d6556d132cddd5bd79f37f96f2601fe0608afac4f5fb01ec0b3/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a519dab6d63c514f3a81053e5266c549679e4aa88f6ec57f2b7b854aceb1b0", size = 1977027, upload-time = "2026-04-20T14:42:02.001Z" }, + { url = "https://files.pythonhosted.org/packages/82/11/31d60ee2b45540d3fb0b29302a393dbc01cd771c473f5b5147bcd353e593/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6cd87cb1575b1ad05ba98894c5b5c96411ef678fa2f6ed2576607095b8d9789", size = 2063008, upload-time = "2026-04-20T14:44:17.952Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/db/3a9d1957181b59258f44a2300ab0f0be9d1e12d662a4f57bb31250455c52/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f80a55484b8d843c8ada81ebf70a682f3f00a3d40e378c06cf17ecb44d280d7d", size = 2233082, upload-time = "2026-04-20T14:40:57.934Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e1/3277c38792aeb5cfb18c2f0c5785a221d9ff4e149abbe1184d53d5f72273/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3861f1731b90c50a3266316b9044f5c9b405eecb8e299b0a7120596334e4fe9c", size = 2304615, upload-time = "2026-04-20T14:42:12.584Z" }, + { url = "https://files.pythonhosted.org/packages/5e/d5/e3d9717c9eba10855325650afd2a9cba8e607321697f18953af9d562da2f/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb528e295ed31570ac3dcc9bfdd6e0150bc11ce6168ac87a8082055cf1a67395", size = 2094380, upload-time = "2026-04-20T14:43:05.522Z" }, + { url = "https://files.pythonhosted.org/packages/a1/20/abac35dedcbfd66c6f0b03e4e3564511771d6c9b7ede10a362d03e110d9b/pydantic_core-2.46.3-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:367508faa4973b992b271ba1494acaab36eb7e8739d1e47be5035fb1ea225396", size = 2135429, upload-time = "2026-04-20T14:41:55.549Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a5/41bfd1df69afad71b5cf0535055bccc73022715ad362edbc124bc1e021d7/pydantic_core-2.46.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ad3c826fe523e4becf4fe39baa44286cff85ef137c729a2c5e269afbfd0905d", size = 2174582, upload-time = "2026-04-20T14:41:45.96Z" }, + { url = "https://files.pythonhosted.org/packages/79/65/38d86ea056b29b2b10734eb23329b7a7672ca604df4f2b6e9c02d4ee22fe/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ec638c5d194ef8af27db69f16c954a09797c0dc25015ad6123eb2c73a4d271ca", size = 2187533, upload-time = "2026-04-20T14:40:55.367Z" }, + { url = "https://files.pythonhosted.org/packages/b6/55/a1129141678a2026badc539ad1dee0a71d06f54c2f06a4bd68c030ac781b/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:28ed528c45446062ee66edb1d33df5d88828ae167de76e773a3c7f64bd14e976", size = 2332985, upload-time = "2026-04-20T14:44:13.05Z" }, + { url = "https://files.pythonhosted.org/packages/d7/60/cb26f4077719f709e54819f4e8e1d43f4091f94e285eb6bd21e1190a7b7c/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aed19d0c783886d5bd86d80ae5030006b45e28464218747dcf83dabfdd092c7b", size = 2373670, upload-time = "2026-04-20T14:41:53.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/7e/c3f21882bdf1d8d086876f81b5e296206c69c6082551d776895de7801fa0/pydantic_core-2.46.3-cp312-cp312-win32.whl", hash = "sha256:06d5d8820cbbdb4147578c1fe7ffcd5b83f34508cb9f9ab76e807be7db6ff0a4", size = 1966722, upload-time = "2026-04-20T14:44:30.588Z" }, + { url = "https://files.pythonhosted.org/packages/57/be/6b5e757b859013ebfbd7adba02f23b428f37c86dcbf78b5bb0b4ffd36e99/pydantic_core-2.46.3-cp312-cp312-win_amd64.whl", hash = "sha256:c3212fda0ee959c1dd04c60b601ec31097aaa893573a3a1abd0a47bcac2968c1", size = 2072970, upload-time = "2026-04-20T14:42:54.248Z" }, + { url = "https://files.pythonhosted.org/packages/bf/f8/a989b21cc75e9a32d24192ef700eea606521221a89faa40c919ce884f2b1/pydantic_core-2.46.3-cp312-cp312-win_arm64.whl", hash = "sha256:f1f8338dd7a7f31761f1f1a3c47503a9a3b34eea3c8b01fa6ee96408affb5e72", size = 2035963, upload-time = "2026-04-20T14:44:20.4Z" }, + 
{ url = "https://files.pythonhosted.org/packages/9b/3c/9b5e8eb9821936d065439c3b0fb1490ffa64163bfe7e1595985a47896073/pydantic_core-2.46.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:12bc98de041458b80c86c56b24df1d23832f3e166cbaff011f25d187f5c62c37", size = 2102109, upload-time = "2026-04-20T14:41:24.219Z" }, + { url = "https://files.pythonhosted.org/packages/91/97/1c41d1f5a19f241d8069f1e249853bcce378cdb76eec8ab636d7bc426280/pydantic_core-2.46.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85348b8f89d2c3508b65b16c3c33a4da22b8215138d8b996912bb1532868885f", size = 1951820, upload-time = "2026-04-20T14:42:14.236Z" }, + { url = "https://files.pythonhosted.org/packages/30/b4/d03a7ae14571bc2b6b3c7b122441154720619afe9a336fa3a95434df5e2f/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1105677a6df914b1fb71a81b96c8cce7726857e1717d86001f29be06a25ee6f8", size = 1977785, upload-time = "2026-04-20T14:42:31.648Z" }, + { url = "https://files.pythonhosted.org/packages/ae/0c/4086f808834b59e3c8f1aa26df8f4b6d998cdcf354a143d18ef41529d1fe/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87082cd65669a33adeba5470769e9704c7cf026cc30afb9cc77fd865578ebaad", size = 2062761, upload-time = "2026-04-20T14:40:37.093Z" }, + { url = "https://files.pythonhosted.org/packages/fa/71/a649be5a5064c2df0db06e0a512c2281134ed2fcc981f52a657936a7527c/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60e5f66e12c4f5212d08522963380eaaeac5ebd795826cfd19b2dfb0c7a52b9c", size = 2232989, upload-time = "2026-04-20T14:42:59.254Z" }, + { url = "https://files.pythonhosted.org/packages/a2/84/7756e75763e810b3a710f4724441d1ecc5883b94aacb07ca71c5fb5cfb69/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6cdf19bf84128d5e7c37e8a73a0c5c10d51103a650ac585d42dd6ae233f2b7f", size = 2303975, upload-time = "2026-04-20T14:41:32.287Z" }, + { url = "https://files.pythonhosted.org/packages/6c/35/68a762e0c1e31f35fa0dac733cbd9f5b118042853698de9509c8e5bf128b/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031bb17f4885a43773c8c763089499f242aee2ea85cf17154168775dccdecf35", size = 2095325, upload-time = "2026-04-20T14:42:47.685Z" }, + { url = "https://files.pythonhosted.org/packages/77/bf/1bf8c9a8e91836c926eae5e3e51dce009bf495a60ca56060689d3df3f340/pydantic_core-2.46.3-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:bcf2a8b2982a6673693eae7348ef3d8cf3979c1d63b54fca7c397a635cc68687", size = 2133368, upload-time = "2026-04-20T14:41:22.766Z" }, + { url = "https://files.pythonhosted.org/packages/e5/50/87d818d6bab915984995157ceb2380f5aac4e563dddbed6b56f0ed057aba/pydantic_core-2.46.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28e8cf2f52d72ced402a137145923a762cbb5081e48b34312f7a0c8f55928ec3", size = 2173908, upload-time = "2026-04-20T14:42:52.044Z" }, + { url = "https://files.pythonhosted.org/packages/91/88/a311fb306d0bd6185db41fa14ae888fb81d0baf648a761ae760d30819d33/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:17eaface65d9fc5abb940003020309c1bf7a211f5f608d7870297c367e6f9022", size = 2186422, upload-time = "2026-04-20T14:43:29.55Z" }, + { url = "https://files.pythonhosted.org/packages/8f/79/28fd0d81508525ab2054fef7c77a638c8b5b0afcbbaeee493cf7c3fef7e1/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = 
"sha256:93fd339f23408a07e98950a89644f92c54d8729719a40b30c0a30bb9ebc55d23", size = 2332709, upload-time = "2026-04-20T14:42:16.134Z" }, + { url = "https://files.pythonhosted.org/packages/b3/21/795bf5fe5c0f379308b8ef19c50dedab2e7711dbc8d0c2acf08f1c7daa05/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:23cbdb3aaa74dfe0837975dbf69b469753bbde8eacace524519ffdb6b6e89eb7", size = 2372428, upload-time = "2026-04-20T14:41:10.974Z" }, + { url = "https://files.pythonhosted.org/packages/45/b3/ed14c659cbe7605e3ef063077680a64680aec81eb1a04763a05190d49b7f/pydantic_core-2.46.3-cp313-cp313-win32.whl", hash = "sha256:610eda2e3838f401105e6326ca304f5da1e15393ae25dacae5c5c63f2c275b13", size = 1965601, upload-time = "2026-04-20T14:41:42.128Z" }, + { url = "https://files.pythonhosted.org/packages/ef/bb/adb70d9a762ddd002d723fbf1bd492244d37da41e3af7b74ad212609027e/pydantic_core-2.46.3-cp313-cp313-win_amd64.whl", hash = "sha256:68cc7866ed863db34351294187f9b729964c371ba33e31c26f478471c52e1ed0", size = 2071517, upload-time = "2026-04-20T14:43:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/52/eb/66faefabebfe68bd7788339c9c9127231e680b11906368c67ce112fdb47f/pydantic_core-2.46.3-cp313-cp313-win_arm64.whl", hash = "sha256:f64b5537ac62b231572879cd08ec05600308636a5d63bcbdb15063a466977bec", size = 2035802, upload-time = "2026-04-20T14:43:38.507Z" }, + { url = "https://files.pythonhosted.org/packages/7f/db/a7bcb4940183fda36022cd18ba8dd12f2dff40740ec7b58ce7457befa416/pydantic_core-2.46.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:afa3aa644f74e290cdede48a7b0bee37d1c35e71b05105f6b340d484af536d9b", size = 2097614, upload-time = "2026-04-20T14:44:38.374Z" }, + { url = "https://files.pythonhosted.org/packages/24/35/e4066358a22e3e99519db370494c7528f5a2aa1367370e80e27e20283543/pydantic_core-2.46.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ced3310e51aa425f7f77da8bbbb5212616655bedbe82c70944320bc1dbe5e018", size = 1951896, upload-time = "2026-04-20T14:40:53.996Z" }, + { url = "https://files.pythonhosted.org/packages/87/92/37cf4049d1636996e4b888c05a501f40a43ff218983a551d57f9d5e14f0d/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e29908922ce9da1a30b4da490bd1d3d82c01dcfdf864d2a74aacee674d0bfa34", size = 1979314, upload-time = "2026-04-20T14:41:49.446Z" }, + { url = "https://files.pythonhosted.org/packages/d8/36/9ff4d676dfbdfb2d591cf43f3d90ded01e15b1404fd101180ed2d62a2fd3/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c9ff69140423eea8ed2d5477df3ba037f671f5e897d206d921bc9fdc39613e7", size = 2056133, upload-time = "2026-04-20T14:42:23.574Z" }, + { url = "https://files.pythonhosted.org/packages/bc/f0/405b442a4d7ba855b06eec8b2bf9c617d43b8432d099dfdc7bf999293495/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b675ab0a0d5b1c8fdb81195dc5bcefea3f3c240871cdd7ff9a2de8aa50772eb2", size = 2228726, upload-time = "2026-04-20T14:44:22.816Z" }, + { url = "https://files.pythonhosted.org/packages/e7/f8/65cd92dd5a0bd89ba277a98ecbfaf6fc36bbd3300973c7a4b826d6ab1391/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0087084960f209a9a4af50ecd1fb063d9ad3658c07bb81a7a53f452dacbfb2ba", size = 2301214, upload-time = "2026-04-20T14:44:48.792Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/86/ef96a4c6e79e7a2d0410826a68fbc0eccc0fd44aa733be199d5fcac3bb87/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed42e6cc8e1b0e2b9b96e2276bad70ae625d10d6d524aed0c93de974ae029f9f", size = 2099927, upload-time = "2026-04-20T14:41:40.196Z" }, + { url = "https://files.pythonhosted.org/packages/6d/53/269caf30e0096e0a8a8f929d1982a27b3879872cca2d917d17c2f9fdf4fe/pydantic_core-2.46.3-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:f1771ce258afb3e4201e67d154edbbae712a76a6081079fe247c2f53c6322c22", size = 2128789, upload-time = "2026-04-20T14:41:15.868Z" }, + { url = "https://files.pythonhosted.org/packages/00/b0/1a6d9b6a587e118482910c244a1c5acf4d192604174132efd12bf0ac486f/pydantic_core-2.46.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7610b6a5242a6c736d8ad47fd5fff87fcfe8f833b281b1c409c3d6835d9227f", size = 2173815, upload-time = "2026-04-20T14:44:25.152Z" }, + { url = "https://files.pythonhosted.org/packages/87/56/e7e00d4041a7e62b5a40815590114db3b535bf3ca0bf4dca9f16cef25246/pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:ff5e7783bcc5476e1db448bf268f11cb257b1c276d3e89f00b5727be86dd0127", size = 2181608, upload-time = "2026-04-20T14:41:28.933Z" }, + { url = "https://files.pythonhosted.org/packages/e8/22/4bd23c3d41f7c185d60808a1de83c76cf5aeabf792f6c636a55c3b1ec7f9/pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:9d2e32edcc143bc01e95300671915d9ca052d4f745aa0a49c48d4803f8a85f2c", size = 2326968, upload-time = "2026-04-20T14:42:03.962Z" }, + { url = "https://files.pythonhosted.org/packages/24/ac/66cd45129e3915e5ade3b292cb3bc7fd537f58f8f8dbdaba6170f7cabb74/pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d83d1c6b87fa56b521479cff237e626a292f3b31b6345c15a99121b454c1", size = 2369842, upload-time = "2026-04-20T14:41:35.52Z" }, + { url = "https://files.pythonhosted.org/packages/a2/51/dd4248abb84113615473aa20d5545b7c4cd73c8644003b5259686f93996c/pydantic_core-2.46.3-cp314-cp314-win32.whl", hash = "sha256:07bc6d2a28c3adb4f7c6ae46aa4f2d2929af127f587ed44057af50bf1ce0f505", size = 1959661, upload-time = "2026-04-20T14:41:00.042Z" }, + { url = "https://files.pythonhosted.org/packages/20/eb/59980e5f1ae54a3b86372bd9f0fa373ea2d402e8cdcd3459334430f91e91/pydantic_core-2.46.3-cp314-cp314-win_amd64.whl", hash = "sha256:8940562319bc621da30714617e6a7eaa6b98c84e8c685bcdc02d7ed5e7c7c44e", size = 2071686, upload-time = "2026-04-20T14:43:16.471Z" }, + { url = "https://files.pythonhosted.org/packages/8c/db/1cf77e5247047dfee34bc01fa9bca134854f528c8eb053e144298893d370/pydantic_core-2.46.3-cp314-cp314-win_arm64.whl", hash = "sha256:5dcbbcf4d22210ced8f837c96db941bdb078f419543472aca5d9a0bb7cddc7df", size = 2026907, upload-time = "2026-04-20T14:43:31.732Z" }, + { url = "https://files.pythonhosted.org/packages/57/c0/b3df9f6a543276eadba0a48487b082ca1f201745329d97dbfa287034a230/pydantic_core-2.46.3-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:d0fe3dce1e836e418f912c1ad91c73357d03e556a4d286f441bf34fed2dbeecf", size = 2095047, upload-time = "2026-04-20T14:42:37.982Z" }, + { url = "https://files.pythonhosted.org/packages/66/57/886a938073b97556c168fd99e1a7305bb363cd30a6d2c76086bf0587b32a/pydantic_core-2.46.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9ce92e58abc722dac1bf835a6798a60b294e48eb0e625ec9fd994b932ac5feee", size = 1934329, upload-time = "2026-04-20T14:43:49.655Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/7c/b42eaa5c34b13b07ecb51da21761297a9b8eb43044c864a035999998f328/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a03e6467f0f5ab796a486146d1b887b2dc5e5f9b3288898c1b1c3ad974e53e4a", size = 1974847, upload-time = "2026-04-20T14:42:10.737Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9b/92b42db6543e7de4f99ae977101a2967b63122d4b6cf7773812da2d7d5b5/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2798b6ba041b9d70acfb9071a2ea13c8456dd1e6a5555798e41ba7b0790e329c", size = 2041742, upload-time = "2026-04-20T14:40:44.262Z" }, + { url = "https://files.pythonhosted.org/packages/0f/19/46fbe1efabb5aa2834b43b9454e70f9a83ad9c338c1291e48bdc4fecf167/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9be3e221bdc6d69abf294dcf7aff6af19c31a5cdcc8f0aa3b14be29df4bd03b1", size = 2236235, upload-time = "2026-04-20T14:41:27.307Z" }, + { url = "https://files.pythonhosted.org/packages/77/da/b3f95bc009ad60ec53120f5d16c6faa8cabdbe8a20d83849a1f2b8728148/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f13936129ce841f2a5ddf6f126fea3c43cd128807b5a59588c37cf10178c2e64", size = 2282633, upload-time = "2026-04-20T14:44:33.271Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6e/401336117722e28f32fb8220df676769d28ebdf08f2f4469646d404c43a3/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28b5f2ef03416facccb1c6ef744c69793175fd27e44ef15669201601cf423acb", size = 2109679, upload-time = "2026-04-20T14:44:41.065Z" }, + { url = "https://files.pythonhosted.org/packages/fc/53/b289f9bc8756a32fe718c46f55afaeaf8d489ee18d1a1e7be1db73f42cc4/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:830d1247d77ad23852314f069e9d7ddafeec5f684baf9d7e7065ed46a049c4e6", size = 2108342, upload-time = "2026-04-20T14:42:50.144Z" }, + { url = "https://files.pythonhosted.org/packages/10/5b/8292fc7c1f9111f1b2b7c1b0dcf1179edcd014fc3ea4517499f50b829d71/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0793c90c1a3c74966e7975eaef3ed30ebdff3260a0f815a62a22adc17e4c01c", size = 2157208, upload-time = "2026-04-20T14:42:08.133Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9e/f80044e9ec07580f057a89fc131f78dda7a58751ddf52bbe05eaf31db50f/pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:d2d0aead851b66f5245ec0c4fb2612ef457f8bbafefdf65a2bf9d6bac6140f47", size = 2167237, upload-time = "2026-04-20T14:42:25.412Z" }, + { url = "https://files.pythonhosted.org/packages/f8/84/6781a1b037f3b96be9227edbd1101f6d3946746056231bf4ac48cdff1a8d/pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:2f40e4246676beb31c5ce77c38a55ca4e465c6b38d11ea1bd935420568e0b1ab", size = 2312540, upload-time = "2026-04-20T14:40:40.313Z" }, + { url = "https://files.pythonhosted.org/packages/3e/db/19c0839feeb728e7df03255581f198dfdf1c2aeb1e174a8420b63c5252e5/pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:cf489cf8986c543939aeee17a09c04d6ffb43bfef8ca16fcbcc5cfdcbed24dba", size = 2369556, upload-time = "2026-04-20T14:41:09.427Z" }, + { url = "https://files.pythonhosted.org/packages/e0/15/3228774cb7cd45f5f721ddf1b2242747f4eb834d0c491f0c02d606f09fed/pydantic_core-2.46.3-cp314-cp314t-win32.whl", hash = 
"sha256:ffe0883b56cfc05798bf994164d2b2ff03efe2d22022a2bb080f3b626176dd56", size = 1949756, upload-time = "2026-04-20T14:41:25.717Z" }, + { url = "https://files.pythonhosted.org/packages/b8/2a/c79cf53fd91e5a87e30d481809f52f9a60dd221e39de66455cf04deaad37/pydantic_core-2.46.3-cp314-cp314t-win_amd64.whl", hash = "sha256:706d9d0ce9cf4593d07270d8e9f53b161f90c57d315aeec4fb4fd7a8b10240d8", size = 2051305, upload-time = "2026-04-20T14:43:18.627Z" }, + { url = "https://files.pythonhosted.org/packages/0b/db/d8182a7f1d9343a032265aae186eb063fe26ca4c40f256b21e8da4498e89/pydantic_core-2.46.3-cp314-cp314t-win_arm64.whl", hash = "sha256:77706aeb41df6a76568434701e0917da10692da28cb69d5fb6919ce5fdb07374", size = 2026310, upload-time = "2026-04-20T14:41:01.778Z" }, + { url = "https://files.pythonhosted.org/packages/34/42/f426db557e8ab2791bc7562052299944a118655496fbff99914e564c0a94/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b12dd51f1187c2eb489af8e20f880362db98e954b54ab792fa5d92e8bcc6b803", size = 2091877, upload-time = "2026-04-20T14:43:27.091Z" }, + { url = "https://files.pythonhosted.org/packages/5c/4f/86a832a9d14df58e663bfdf4627dc00d3317c2bd583c4fb23390b0f04b8e/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f00a0961b125f1a47af7bcc17f00782e12f4cd056f83416006b30111d941dfa3", size = 1932428, upload-time = "2026-04-20T14:40:45.781Z" }, + { url = "https://files.pythonhosted.org/packages/11/1a/fe857968954d93fb78e0d4b6df5c988c74c4aaa67181c60be7cfe327c0ca/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57697d7c056aca4bbb680200f96563e841a6386ac1129370a0102592f4dddff5", size = 1997550, upload-time = "2026-04-20T14:44:02.425Z" }, + { url = "https://files.pythonhosted.org/packages/17/eb/9d89ad2d9b0ba8cd65393d434471621b98912abb10fbe1df08e480ba57b5/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd35aa21299def8db7ef4fe5c4ff862941a9a158ca7b63d61e66fe67d30416b4", size = 2137657, upload-time = "2026-04-20T14:42:45.149Z" }, ] [[package]] name = "pydantic-settings" -version = "2.12.0" +version = "2.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" } +sdist = { url = "https://files.pythonhosted.org/packages/42/98/c8345dccdc31de4228c039a98f6467a941e39558da41c1744fbe29fa5666/pydantic_settings-2.14.0.tar.gz", hash = "sha256:24285fd4b0e0c06507dd9fdfd331ee23794305352aaec8fc4eb92d4047aeb67d", size = 235709, upload-time = "2026-04-20T13:37:40.293Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" }, + { url = "https://files.pythonhosted.org/packages/01/dd/bebff3040138f00ae8a102d426b27349b9a49acc310fcae7f92112d867e3/pydantic_settings-2.14.0-py3-none-any.whl", hash = "sha256:fc8d5d692eb7092e43c8647c1c35a3ecd00e040fcf02ed86f4cb5458ca62182e", size 
= 60940, upload-time = "2026-04-20T13:37:38.586Z" },
 ]

 [[package]]
@@ -2989,20 +3372,20 @@ wheels = [

 [[package]]
 name = "pygments"
-version = "2.19.2"
+version = "2.20.0"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
+    { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" },
 ]

 [[package]]
 name = "pyjwt"
-version = "2.12.0"
+version = "2.12.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a8/10/e8192be5f38f3e8e7e046716de4cae33d56fd5ae08927a823bb916be36c1/pyjwt-2.12.0.tar.gz", hash = "sha256:2f62390b667cd8257de560b850bb5a883102a388829274147f1d724453f8fb02", size = 102511, upload-time = "2026-03-12T17:15:30.831Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c2/27/a3b6e5bf6ff856d2509292e95c8f57f0df7017cf5394921fc4e4ef40308a/pyjwt-2.12.1.tar.gz", hash = "sha256:c74a7a2adf861c04d002db713dd85f84beb242228e671280bf709d765b03672b", size = 102564, upload-time = "2026-03-13T19:27:37.25Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/15/70/70f895f404d363d291dcf62c12c85fdd47619ad9674ac0f53364d035925a/pyjwt-2.12.0-py3-none-any.whl", hash = "sha256:9bb459d1bdd0387967d287f5656bf7ec2b9a26645d1961628cda1764e087fd6e", size = 29700, upload-time = "2026-03-12T17:15:29.257Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/7a/8dd906bd22e79e47397a61742927f6747fe93242ef86645ee9092e610244/pyjwt-2.12.1-py3-none-any.whl", hash = "sha256:28ca37c070cad8ba8cd9790cd940535d40274d22f80ab87f3ac6a713e6e8454c", size = 29726, upload-time = "2026-03-13T19:27:35.677Z" },
 ]

 [package.optional-dependencies]
@@ -3012,23 +3395,23 @@ crypto = [

 [[package]]
 name = "pymupdf"
-version = "1.27.2.2"
+version = "1.27.2.3"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f1/32/f6b645c51d79a188a4844140c5dabca7b487ad56c4be69c4bc782d0d11a9/pymupdf-1.27.2.2.tar.gz", hash = "sha256:ea8fdc3ab6671ca98f629d5ec3032d662c8cf1796b146996b7ad306ac7ed3335", size = 85354380, upload-time = "2026-03-20T09:47:58.386Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/22/32/708bedc9dde7b328d45abbc076091769d44f2f24ad151ad92d56a6ec142b/pymupdf-1.27.2.3.tar.gz", hash = "sha256:7a92faa25129e8bbec5e50eeb9214f187665428c31b05c4ef6e36c58c0b1c6d2", size = 85759618, upload-time = "2026-04-24T14:13:14.42Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/90/88/d01992a50165e22dec057a1129826846c547feb4ba07f42720ac030ce438/pymupdf-1.27.2.2-cp310-abi3-macosx_10_9_x86_64.whl", hash = "sha256:800f43e60a6f01f644343c2213b8613db02eaf4f4ba235b417b3351fa99e01c0", size = 23987563, upload-time = "2026-03-19T12:35:42.989Z" },
-    { url = "https://files.pythonhosted.org/packages/6d/0e/9f526bc1d49d8082eff0d1547a69d541a0c5a052e71da625559efaba46a6/pymupdf-1.27.2.2-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:8e2e4299ef1ac0c9dff9be096cbd22783699673abecfa7c3f73173ae06421d73", size = 23263089, upload-time = "2026-03-20T09:44:16.982Z" },
-    { url = "https://files.pythonhosted.org/packages/42/be/984f0d6343935b5dd30afaed6be04fc753146bf55709e63ef28bf9ef7497/pymupdf-1.27.2.2-cp310-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5e3d54922db1c7da844f1208ac1db05704770988752311f81dd36694ae0a07b", size = 24318817, upload-time = "2026-03-20T09:44:33.209Z" },
-    { url = "https://files.pythonhosted.org/packages/22/8e/85e9d9f11dbf34036eb1df283805ef6b885f2005a56d6533bb58ab0b8a11/pymupdf-1.27.2.2-cp310-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:892698c9768457eb0991c102c96a856c0a7062539371df5e6bee0816f3ef498e", size = 24948135, upload-time = "2026-03-20T09:44:51.012Z" },
-    { url = "https://files.pythonhosted.org/packages/db/e6/386edb017e5b93f1ab0bf6653ae32f3dd8dfc834ed770212e10ca62f4af9/pymupdf-1.27.2.2-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:8b4bbfa6ef347fade678771a93f6364971c51a2cdc44cd2400dc4eeed1ddb4e6", size = 25169585, upload-time = "2026-03-20T09:45:05.393Z" },
-    { url = "https://files.pythonhosted.org/packages/ba/fd/f1ebe24fcd31aaea8b85b3a7ac4c3fc96e20388be5466ace27c9a3c546d9/pymupdf-1.27.2.2-cp310-abi3-win32.whl", hash = "sha256:0b8e924433b7e0bd46be820899300259235997d5a747638471fb2762baa8ee30", size = 18008861, upload-time = "2026-03-20T09:45:21.353Z" },
-    { url = "https://files.pythonhosted.org/packages/a8/b6/2a9a8556000199bbf80a5915dcd15d550d1e5288894316445c54726aaf53/pymupdf-1.27.2.2-cp310-abi3-win_amd64.whl", hash = "sha256:09bb53f9486ccb5297030cbc2dbdae845ba1c3c5126e96eb2d16c4f118de0b5b", size = 19238032, upload-time = "2026-03-20T09:45:37.941Z" },
-    { url = "https://files.pythonhosted.org/packages/c2/c6/e3e11c42f09b9c34ec332c0f37b817671b59ef4001895b854f0494092105/pymupdf-1.27.2.2-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6cebfbbdfd219ebdebf4d8e3914624b2e3d3a844c43f4f76935822dd9b13cc12", size = 24985299, upload-time = "2026-03-20T09:45:53.26Z" },
+    { url = "https://files.pythonhosted.org/packages/dc/09/ddbdfa7ee91fbabd6f63d7d744884cbdfe3e7ff9b8604749fb38bddf5c5d/pymupdf-1.27.2.3-cp310-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fc1bc3cae6e9e150b0dbb0a9221bdfd411d65f0db2fe359eaa22467d7cc2a05f", size = 24002636, upload-time = "2026-04-24T14:09:17.459Z" },
+    { url = "https://files.pythonhosted.org/packages/01/89/3f8edd6c4f50ca370e2a2f2a3011face36f3760728ffe76dffec91c0fca0/pymupdf-1.27.2.3-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:660d93cb6da5bbddf11d3982ae27745dd3a9902d9f24cdb69adab83962294b5a", size = 23278238, upload-time = "2026-04-24T14:09:32.882Z" },
+    { url = "https://files.pythonhosted.org/packages/c3/26/b7e5a70eb83bd189f8b5df87ec442746b992f2f632662839b288170d357d/pymupdf-1.27.2.3-cp310-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1dd460a3ae4597a755f00a3bd9771f5ebf1531dc111f6a36bf05dd00a6b84425", size = 24333923, upload-time = "2026-04-24T14:09:47.341Z" },
+    { url = "https://files.pythonhosted.org/packages/e4/a0/aa1ee2240f29481a04a827c313333b4ecd8a14d6ac3e15d3f41a30574781/pymupdf-1.27.2.3-cp310-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:857842b4888827bd6155a1131341b2822a7ebe9a8c15a975fd7d490d7a64a30c", size = 24963198, upload-time = "2026-04-24T14:10:07.408Z" },
+    { url = "https://files.pythonhosted.org/packages/69/49/4f742451f980840829fc00ba158bebb25d389c846d8f4f8c65936ee55de8/pymupdf-1.27.2.3-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:580983849c64a08d08344ca3d1580e87c01f046a8392421797bc850efd72a5b6", size = 25184609, upload-time = "2026-04-24T14:10:22.911Z" },
+    { url = "https://files.pythonhosted.org/packages/f6/3f/3853d6608f394faf6eec2bd4e8ea9f6a00beea329b071abdb29f4164cc3d/pymupdf-1.27.2.3-cp310-abi3-win32.whl", hash = "sha256:a5c1088a87189891a4946ab314a14b7934ac4c5b6077f7e74ebee956f8906d0e", size = 18019286, upload-time = "2026-04-24T14:10:34.239Z" },
+    { url = "https://files.pythonhosted.org/packages/44/47/5fb10fe73f96b31253a41647c362ea9e0380920bddf16028414a051247fc/pymupdf-1.27.2.3-cp310-abi3-win_amd64.whl", hash = "sha256:d20f68ef15195e073071dbc4ae7455257c7889af7584e39df490c0a92728526e", size = 19249102, upload-time = "2026-04-24T14:10:46.72Z" },
+    { url = "https://files.pythonhosted.org/packages/53/a4/b9e91aac82293f9c954654c85581ee8212b5b05efadc534b581141241e6f/pymupdf-1.27.2.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:77691604c5d1d0233827139bbcdea61fd57879c84712b8e49b1f45520f7ab9c2", size = 25000393, upload-time = "2026-04-24T14:11:01.669Z" },
 ]

 [[package]]
 name = "pymupdf-layout"
-version = "1.27.2.2"
+version = "1.27.2.3"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "networkx" },
@@ -3038,54 +3421,54 @@ dependencies = [
     { name = "pyyaml" },
 ]
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/65/dd/4a9769b17661c1ee1b5bdeac28c832c9c7cc1ef425eb2088b5b5bd982bcc/pymupdf_layout-1.27.2.2-cp310-abi3-macosx_10_9_x86_64.whl", hash = "sha256:7b8f0d94d5675802c67e4af321214dcfce2de3d963926459dc6fc138607366cd", size = 15799842, upload-time = "2026-03-20T09:46:04.194Z" },
-    { url = "https://files.pythonhosted.org/packages/ce/14/3ed13138449a002ab6957789019da5951fc8ba07ab8f1faf27a14c274717/pymupdf_layout-1.27.2.2-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:bef82a3ff5c05212c806333153cece2b9d972eed173d2352f0c514bb3f1faf54", size = 15795217, upload-time = "2026-03-20T09:46:14.142Z" },
-    { url = "https://files.pythonhosted.org/packages/f7/20/487a2b1422999113ecc8b117cf50e72915992d0a7ef247164989396cf8db/pymupdf_layout-1.27.2.2-cp310-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d610359e1eb8013124531431f3b8c77818070e7869500b92c9b25bd78ea7ef7f", size = 15805238, upload-time = "2026-03-20T09:46:23.676Z" },
-    { url = "https://files.pythonhosted.org/packages/02/45/35c67a1b1956618f69674b9823cc78e96787de37fe22a2b217581a1770a9/pymupdf_layout-1.27.2.2-cp310-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df503eab9c28cfaadb847970f39093958e7a2ebf79fc47426dbd91b9f9064d6c", size = 15806267, upload-time = "2026-03-20T09:46:33.089Z" },
-    { url = "https://files.pythonhosted.org/packages/82/56/97fad0cd00869e934f7a130f251b21e3534ec0fcffaa3459286fbf3daf32/pymupdf_layout-1.27.2.2-cp310-abi3-win_amd64.whl", hash = "sha256:efc66387833f085b9e9a77089c748c88c4c96485772d7dfe0139eaa6efc2f444", size = 15809705, upload-time = "2026-03-20T09:46:43.009Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/ee/067726c3ee5574ad5c605d00d7419e264ef509d626a726f99388111f8216/pymupdf_layout-1.27.2.3-cp310-abi3-macosx_10_9_x86_64.whl", hash = "sha256:75c2ab3c0e8830ac2bc50cfd32d375a30768a2610dac72a02f08265336e0834f", size = 15799844, upload-time = "2026-04-24T14:11:13.177Z" },
+    { url = "https://files.pythonhosted.org/packages/0a/ba/46a7a36474722f9280d885f6eec878561a257d9378e52590b43d32ffb96c/pymupdf_layout-1.27.2.3-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:5656b09669dcd7c51f539afb6fdaf853602bab4cbc20479ee5ee1a85a4e32b60", size = 15795220, upload-time = "2026-04-24T14:11:23.17Z" },
+    { url = "https://files.pythonhosted.org/packages/84/87/bfdcca67346052943a4549814f2009b38f4d15ec025798cdf7dfa5f57c84/pymupdf_layout-1.27.2.3-cp310-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fcf03aa815cbceebdb3263dd6a190de4547c46b1d168928836ec38738afe127d", size = 15805240, upload-time = "2026-04-24T14:11:33.465Z" },
+    { url = "https://files.pythonhosted.org/packages/32/e9/7ce6eaf97cebd46c3808593282e9eb99a60cddd6183e25a636980d5c7986/pymupdf_layout-1.27.2.3-cp310-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:303b9414216dfaf711ec7d807b6f1e4c3e0a92bbb4569340fcedd9d5593d16ca", size = 15806269, upload-time = "2026-04-24T14:11:43.481Z" },
+    { url = "https://files.pythonhosted.org/packages/bf/61/3b2417d8f2cdfaa0f4749cd9dafa3379cb5cdaddf4233165f1ff81953c30/pymupdf_layout-1.27.2.3-cp310-abi3-win_amd64.whl", hash = "sha256:503b64d9b6b31ea3af79ef85cf7d36950c5048af468cb297684d2953553c62ad", size = 15809163, upload-time = "2026-04-24T14:11:53.956Z" },
 ]

 [[package]]
 name = "pymupdf4llm"
-version = "1.27.2.2"
+version = "1.27.2.3"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "pymupdf" },
     { name = "pymupdf-layout" },
     { name = "tabulate" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/f0/e7/8b97bf223ea2fd72efd862af3210ae3aa2fb15b39b55767de9e0a2fd0985/pymupdf4llm-1.27.2.2.tar.gz", hash = "sha256:f95e113d434958f8c63393c836fe965ad398d1fc07e7807c0a627c9ec1946e9f", size = 72877, upload-time = "2026-03-20T09:48:01.485Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/87/c0/e3830452d82032c3d82a9879616c05bf0c51e0dea03c1d80d57b3a6ec0d1/pymupdf4llm-1.27.2.3.tar.gz", hash = "sha256:42ec1a47ddc62be3f4f40c116d27618611c6f9fa366719016d9ddc3f3a3dc22b", size = 1406297, upload-time = "2026-04-24T14:13:18.843Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/01/fc/a4977b84f9a7e70aac4c9beed55d4693b985cef89fab7d49c896335bf158/pymupdf4llm-1.27.2.2-py3-none-any.whl", hash = "sha256:ec3bbceed21c6f86289155f29c557aa54ae1c8282c4a45d6de984f16fb4c90cb", size = 84294, upload-time = "2026-03-20T09:45:55.365Z" },
+    { url = "https://files.pythonhosted.org/packages/e6/38/84bf29f4dd72e6c450546df6ca8f53021f764fd945ba67dcc235d39bc20e/pymupdf4llm-1.27.2.3-py3-none-any.whl", hash = "sha256:bd724b79fa3f06a5b28d7a65f7acfa8de56e04bdb603ac2d6dff315e0d151aaa", size = 77348, upload-time = "2026-04-24T14:11:04.305Z" },
 ]

 [[package]]
 name = "pypdfium2"
-version = "5.3.0"
+version = "5.7.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/18/83/173dab58beb6c7e772b838199014c173a2436018dd7cfde9bbf4a3be15da/pypdfium2-5.3.0.tar.gz", hash = "sha256:2873ffc95fcb01f329257ebc64a5fdce44b36447b6b171fe62f7db5dc3269885", size = 268742, upload-time = "2026-01-05T16:29:03.02Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/bc/13/ee794b8a810b7226426c8b50d6c28637c059e7da0caf9936164f352ef858/pypdfium2-5.7.1.tar.gz", hash = "sha256:3b3b20a56048dbe3fd4bf397f9bec854c834668bc47ef6a7d9041b23bb04317b", size = 266791, upload-time = "2026-04-20T15:01:02.598Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/e3/a4/6bb5b5918c7fc236ec426be8a0205a984fe0a26ae23d5e4dd497398a6571/pypdfium2-5.3.0-py3-none-android_23_arm64_v8a.whl", hash = "sha256:885df6c78d41600cb086dc0c76b912d165b5bd6931ca08138329ea5a991b3540", size = 2763287, upload-time = "2026-01-05T16:28:24.21Z" },
-    { url = "https://files.pythonhosted.org/packages/3e/64/24b41b906006bf07099b095f0420ee1f01a3a83a899f3e3731e4da99c06a/pypdfium2-5.3.0-py3-none-android_23_armeabi_v7a.whl", hash = "sha256:6e53dee6b333ee77582499eff800300fb5aa0c7eb8f52f95ccb5ca35ebc86d48", size = 2303285, upload-time = "2026-01-05T16:28:26.274Z" },
-    { url = "https://files.pythonhosted.org/packages/c2/c0/3ec73f4ded83ba6c02acf6e9d228501759d5d74fe57f1b93849ab92dcc20/pypdfium2-5.3.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:ce4466bdd62119fe25a5f74d107acc9db8652062bf217057630c6ff0bb419523", size = 2816066, upload-time = "2026-01-05T16:28:28.099Z" },
-    { url = "https://files.pythonhosted.org/packages/62/ca/e553b3b8b5c2cdc3d955cc313493ac27bbe63fc22624769d56ded585dd5e/pypdfium2-5.3.0-py3-none-macosx_11_0_x86_64.whl", hash = "sha256:cc2647fd03db42b8a56a8835e8bc7899e604e2042cd6fedeea53483185612907", size = 2945545, upload-time = "2026-01-05T16:28:29.489Z" },
-    { url = "https://files.pythonhosted.org/packages/a1/56/615b776071e95c8570d579038256d0c77969ff2ff381e427be4ab8967f44/pypdfium2-5.3.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35e205f537ddb4069e4b4e22af7ffe84fcf2d686c3fee5e5349f73268a0ef1ca", size = 2979892, upload-time = "2026-01-05T16:28:31.088Z" },
-    { url = "https://files.pythonhosted.org/packages/df/10/27114199b765bdb7d19a9514c07036ad2fc3a579b910e7823ba167ead6de/pypdfium2-5.3.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5795298f44050797ac030994fc2525ea35d2d714efe70058e0ee22e5f613f27", size = 2765738, upload-time = "2026-01-05T16:28:33.18Z" },
-    { url = "https://files.pythonhosted.org/packages/b4/d7/2a3afa35e6c205a4f6264c33b8d2f659707989f93c30b336aa58575f66fa/pypdfium2-5.3.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7cd43dfceb77137e69e74c933d41506da1dddaff70f3a794fb0ad0d73e90d75", size = 3064338, upload-time = "2026-01-05T16:28:34.731Z" },
-    { url = "https://files.pythonhosted.org/packages/a2/f1/6658755cf6e369bb51d0bccb81c51c300404fbe67c2f894c90000b6442dd/pypdfium2-5.3.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5956867558fd3a793e58691cf169718864610becb765bfe74dd83f05cbf1ae3", size = 3415059, upload-time = "2026-01-05T16:28:37.313Z" },
-    { url = "https://files.pythonhosted.org/packages/f5/34/f86482134fa641deb1f524c45ec7ebd6fc8d404df40c5657ddfce528593e/pypdfium2-5.3.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ff1071e9a782625822658dfe6e29e3a644a66960f8713bb17819f5a0ac5987", size = 2998517, upload-time = "2026-01-05T16:28:38.873Z" },
-    { url = "https://files.pythonhosted.org/packages/09/34/40ab99425dcf503c172885904c5dc356c052bfdbd085f9f3cc920e0b8b25/pypdfium2-5.3.0-py3-none-manylinux_2_27_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f319c46ead49d289ab8c1ed2ea63c91e684f35bdc4cf4dc52191c441182ac481", size = 3673154, upload-time = "2026-01-05T16:28:40.347Z" },
-    { url = "https://files.pythonhosted.org/packages/a5/67/0f7532f80825a7728a5cbff3f1104857f8f9fe49ebfd6cb25582a89ae8e1/pypdfium2-5.3.0-py3-none-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6dc67a186da0962294321cace6ccc0a4d212dbc5e9522c640d35725a812324b8", size = 2965002, upload-time = "2026-01-05T16:28:42.143Z" },
-    { url = "https://files.pythonhosted.org/packages/ce/6c/c03d2a3d6621b77aac9604bce1c060de2af94950448787298501eac6c6a2/pypdfium2-5.3.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0ad0afd3d2b5b54d86287266fd6ae3fef0e0a1a3df9d2c4984b3e3f8f70e6330", size = 4130530, upload-time = "2026-01-05T16:28:44.264Z" },
-    { url = "https://files.pythonhosted.org/packages/af/39/9ad1f958cbe35d4693ae87c09ebafda4bb3e4709c7ccaec86c1a829163a3/pypdfium2-5.3.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1afe35230dc3951b3e79b934c0c35a2e79e2372d06503fce6cf1926d2a816f47", size = 3746568, upload-time = "2026-01-05T16:28:45.897Z" },
-    { url = "https://files.pythonhosted.org/packages/2a/e2/4d32310166c2d6955d924737df8b0a3e3efc8d133344a98b10f96320157d/pypdfium2-5.3.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:00385793030cadce08469085cd21b168fd8ff981b009685fef3103bdc5fc4686", size = 4336683, upload-time = "2026-01-05T16:28:47.584Z" },
-    { url = "https://files.pythonhosted.org/packages/14/ea/38c337ff12a8cec4b00fd4fdb0a63a70597a344581e20b02addbd301ab56/pypdfium2-5.3.0-py3-none-musllinux_1_2_ppc64le.whl", hash = "sha256:d911e82676398949697fef80b7f412078df14d725a91c10e383b727051530285", size = 4375030, upload-time = "2026-01-05T16:28:49.5Z" },
-    { url = "https://files.pythonhosted.org/packages/a1/77/9d8de90c35d2fc383be8819bcde52f5821dacbd7404a0225e4010b99d080/pypdfium2-5.3.0-py3-none-musllinux_1_2_riscv64.whl", hash = "sha256:ca1dc625ed347fac3d9002a3ed33d521d5803409bd572e7b3f823c12ab2ef58f", size = 3928914, upload-time = "2026-01-05T16:28:51.433Z" },
-    { url = "https://files.pythonhosted.org/packages/a5/39/9d4a6fbd78fcb6803b0ea5e4952a31d6182a0aaa2609cfcd0eb88446fdb8/pypdfium2-5.3.0-py3-none-musllinux_1_2_s390x.whl", hash = "sha256:ea4f9db2d3575f22cd41f4c7a855240ded842f135e59a961b5b1351a65ce2b6e", size = 4997777, upload-time = "2026-01-05T16:28:53.589Z" },
-    { url = "https://files.pythonhosted.org/packages/9d/38/cdd4ed085c264234a59ad32df1dfe432c77a7403da2381e0fcc1ba60b74e/pypdfium2-5.3.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0ea24409613df350223c6afc50911c99dca0d43ddaf2616c5a1ebdffa3e1bcb5", size = 4179895, upload-time = "2026-01-05T16:28:55.322Z" },
-    { url = "https://files.pythonhosted.org/packages/93/4c/d2f40145c9012482699664f615d7ae540a346c84f68a8179449e69dcc4d8/pypdfium2-5.3.0-py3-none-win32.whl", hash = "sha256:5bf695d603f9eb8fdd7c1786add5cf420d57fbc81df142ed63c029ce29614df9", size = 2993570, upload-time = "2026-01-05T16:28:58.37Z" },
-    { url = "https://files.pythonhosted.org/packages/2c/dc/1388ea650020c26ef3f68856b9227e7f153dcaf445e7e4674a0b8f26891e/pypdfium2-5.3.0-py3-none-win_amd64.whl", hash = "sha256:8365af22a39d4373c265f8e90e561cd64d4ddeaf5e6a66546a8caed216ab9574", size = 3102340, upload-time = "2026-01-05T16:28:59.933Z" },
-    { url = "https://files.pythonhosted.org/packages/c8/71/a433668d33999b3aeb2c2dda18aaf24948e862ea2ee148078a35daac6c1c/pypdfium2-5.3.0-py3-none-win_arm64.whl", hash = "sha256:0b2c6bf825e084d91d34456be54921da31e9199d9530b05435d69d1a80501a12", size = 2940987, upload-time = "2026-01-05T16:29:01.511Z" },
+    { url = "https://files.pythonhosted.org/packages/d6/f7/e87ba0eec9cd4e9eedd4bbb867515da970525ca8c105dd5e254758216ee3/pypdfium2-5.7.1-py3-none-android_23_arm64_v8a.whl", hash = "sha256:8008f45e8adc4fc1ec2a51e018e01cd0692d4859bdbb28e88be221804f329468", size = 3367033, upload-time = "2026-04-20T15:00:22.847Z" },
+    { url = "https://files.pythonhosted.org/packages/f6/e1/a4b9be9a09fa9857958357ced51afb25518f6a48e4e68fdc9a091f0f2259/pypdfium2-5.7.1-py3-none-android_23_armeabi_v7a.whl", hash = "sha256:892fcb5a618f5f551fffdb968ac2d64911953c3ba0f9aa628239705af68dbe15", size = 2824449, upload-time = "2026-04-20T15:00:24.913Z" },
+    { url = "https://files.pythonhosted.org/packages/7b/5d/c91abb2610316a1622f86ddf706fcd04d34c7e6923c3fa8fa145c8f7a372/pypdfium2-5.7.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7431847d45dedc3c7ffede15b58ac611e996a0cdcd61318a0190d46b9980ac2b", size = 3443730, upload-time = "2026-04-20T15:00:26.664Z" },
+    { url = "https://files.pythonhosted.org/packages/50/8b/b9eefed83d6a0a59384ee64d25c1515e831c234c3ed6b8c6dfc8f99f4875/pypdfium2-5.7.1-py3-none-macosx_11_0_x86_64.whl", hash = "sha256:548bd09c9f97565ae8ddba30bb65823cbf791b84e4cdb63ed582aec2c289dbe2", size = 3626483, upload-time = "2026-04-20T15:00:28.629Z" },
+    { url = "https://files.pythonhosted.org/packages/5b/98/6d62723e1f58d66e7e0073c4f12048f9d5dcd478369da0990db08e677dd5/pypdfium2-5.7.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18a15ad0918acc3ea98778394f0331b9ad2a1b7384ab3d8d8c63422ffd01ed13", size = 3610098, upload-time = "2026-04-20T15:00:30.344Z" },
+    { url = "https://files.pythonhosted.org/packages/0b/4a/f72b42578f30971c29915e33ee598ed451aa6f0c2808a71526c1b81afd8d/pypdfium2-5.7.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1df04564659d807fb38810d9bd1ac18419d8acbb5f87f2cb20675d7332635b18", size = 3340119, upload-time = "2026-04-20T15:00:32.19Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/64/de69c5feed470617f243e61cac841bfd1b5273d575c3d3b49b27f738e334/pypdfium2-5.7.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a146d036a6b085a406aa256548b827b63016714fd77f8e11b7f704c1175e8cc", size = 3738864, upload-time = "2026-04-20T15:00:33.798Z" },
+    { url = "https://files.pythonhosted.org/packages/07/ce/69ff10766565c5ffcb66cebe780ce3bc4fe7cc16b218df8c240075881c66/pypdfium2-5.7.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3397b0d705b6858c87dec1dc9c44d4c7094601a9b231097f441b64d1a7d5ff0b", size = 4169839, upload-time = "2026-04-20T15:00:35.973Z" },
+    { url = "https://files.pythonhosted.org/packages/03/4b/fff16a831a6f07aad02da0d02b620c455310b8bf4e2642909175dcb7ccae/pypdfium2-5.7.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc2cdf603ac766b91b7c1b455197ec1c3471089d75f999b046edb65ed6cedd80", size = 3657630, upload-time = "2026-04-20T15:00:38.407Z" },
+    { url = "https://files.pythonhosted.org/packages/9b/58/d3148917616164cfad347b0b509342737ed80e060afab07523ffeac2a05f/pypdfium2-5.7.1-py3-none-manylinux_2_27_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1b1a6a5f3320b59138e7570a3f78840540383d058ac180a9a21f924ad3bd7f83", size = 3088898, upload-time = "2026-04-20T15:00:40.109Z" },
+    { url = "https://files.pythonhosted.org/packages/e4/1d/387ca4dfe9865a8d61114dae2debba4d86eed07cdc6a31c5527a049583be/pypdfium2-5.7.1-py3-none-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:91b809c40a5fc248107d13fbcf1dd2c64dbc8e572693a9b93e350bf31efda92b", size = 2955404, upload-time = "2026-04-20T15:00:41.921Z" },
+    { url = "https://files.pythonhosted.org/packages/ad/87/4afc2bfe35d71942f1bf9e774086f74af66a0a4e56338f39a7cbc5b8721c/pypdfium2-5.7.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85611ef61cbc0f5e04de8f99fec0f3db3920b09f46c62afa08c9caa21a74b353", size = 4126600, upload-time = "2026-04-20T15:00:44.079Z" },
+    { url = "https://files.pythonhosted.org/packages/b1/c4/872eef4cb8f0d8ebbf967ca713254ac71c75878a1d5798bc2b8d23104e52/pypdfium2-5.7.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b2764ab909f9b444d4e643be90b064c4053e6828c28bfd47639fc84526ba244d", size = 3742636, upload-time = "2026-04-20T15:00:46.009Z" },
+    { url = "https://files.pythonhosted.org/packages/10/6d/3805a53623a72e20b68e6814b37582994298b231628656ff227fa1158a1f/pypdfium2-5.7.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:fcea3cc20b7cca7d84ceee68b9c6ef7fe773fb71c145542769dc2ceb27e9698a", size = 4332743, upload-time = "2026-04-20T15:00:47.829Z" },
+    { url = "https://files.pythonhosted.org/packages/92/61/3e3f8ae7ad04400bc3c6a75bbf59db500eaf9dff05477d1b25ff4a36363b/pypdfium2-5.7.1-py3-none-musllinux_1_2_ppc64le.whl", hash = "sha256:f04546bc314973397148805d44f8e660e81aa80c2a87e12afb892c11493ded6c", size = 4377471, upload-time = "2026-04-20T15:00:49.443Z" },
+    { url = "https://files.pythonhosted.org/packages/8d/e0/1026f297b5be292cae7095aa4814d57faa3faba0b49552afcaa11a1c2e4e/pypdfium2-5.7.1-py3-none-musllinux_1_2_riscv64.whl", hash = "sha256:66275c8a854969bdf905abc7599e5623d62739c44604d69788ff5457082d275b", size = 3919215, upload-time = "2026-04-20T15:00:51.2Z" },
+    { url = "https://files.pythonhosted.org/packages/e8/5d/7d6d5b392fa42a997aadf127e3b2c25739199141054b33f759ba5d02e653/pypdfium2-5.7.1-py3-none-musllinux_1_2_s390x.whl", hash = "sha256:bbed8f32040ce3b3236a512265976017c2465ea6643a1730f008b39e0339b8ce", size = 4263089, upload-time = "2026-04-20T15:00:53.105Z" },
+    { url = "https://files.pythonhosted.org/packages/2e/b8/d51bd4a1d426fa5b99d4516c77cc1892a8fbfd5a93a823e2679cf9b09ee0/pypdfium2-5.7.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c55d3df09bd0d72a1d192107dcbf80bcb2791662a3eca3b084001f947d3040d5", size = 4175967, upload-time = "2026-04-20T15:00:54.757Z" },
+    { url = "https://files.pythonhosted.org/packages/30/52/06a6358856374ae4400ee1ad0ddaa01d5c31fcd6e8f4577e6a3ed1c40343/pypdfium2-5.7.1-py3-none-win32.whl", hash = "sha256:4f6bbe1211c5883c8fc9ce11008347e5b96ec6571456d959ae289cecdb2867f0", size = 3629154, upload-time = "2026-04-20T15:00:56.916Z" },
+    { url = "https://files.pythonhosted.org/packages/6f/13/e0dbc9377d976d8b03ed0dd07fe9892e06d09fcf4f6a0e66df49366227d7/pypdfium2-5.7.1-py3-none-win_amd64.whl", hash = "sha256:fdf117af26bd310f4f176b3cf0e2e23f0f800e48dcf2bcf6c2cca0de3326f5cb", size = 3747295, upload-time = "2026-04-20T15:00:59.15Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/67/4759522f5bca0ac4cda9f42c7f3f818aa826568793bd8b4532d2d2ffa515/pypdfium2-5.7.1-py3-none-win_arm64.whl", hash = "sha256:622821698fcc30fc560bd4eead6df9e6b846de9876b82861bed0091c09a4c27b", size = 3540903, upload-time = "2026-04-20T15:01:00.994Z" },
 ]

 [[package]]
@@ -3113,6 +3496,19 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/d4/24/a372aaf5c9b7208e7112038812994107bc65a84cd00e0354a88c2c77a617/pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9", size = 375249, upload-time = "2026-04-07T17:16:16.13Z" },
 ]

+[[package]]
+name = "pytest-asyncio"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "pytest" },
+    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" },
+]
+
 [[package]]
 name = "python-dateutil"
 version = "2.9.0.post0"
@@ -3127,11 +3523,11 @@ wheels = [

 [[package]]
 name = "python-dotenv"
-version = "1.2.1"
+version = "1.2.2"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
+    { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" },
 ]

 [[package]]
@@ -3160,24 +3556,15 @@ wheels = [

 [[package]]
 name = "python-telegram-bot"
-version = "22.6"
+version = "22.7"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "httpcore", marker = "python_full_version >= '3.14'" },
     { name = "httpx" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/cd/9b/8df90c85404166a6631e857027866263adb27440d8af1dbeffbdc4f0166c/python_telegram_bot-22.6.tar.gz", hash = "sha256:50ae8cc10f8dff01445628687951020721f37956966b92a91df4c1bf2d113742", size = 1503761, upload-time = "2026-01-24T13:57:00.269Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/e4/25/2258161b1069e66d6c39c0a602dbe57461d4767dc0012539970ea40bc9d6/python_telegram_bot-22.7.tar.gz", hash = "sha256:784b59ea3852fe4616ad63b4a0264c755637f5d725e87755ecdee28300febf61", size = 1516454, upload-time = "2026-03-16T09:36:03.174Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/13/97/7298f0e1afe3a1ae52ff4c5af5087ed4de319ea73eb3b5c8c4dd4e76e708/python_telegram_bot-22.6-py3-none-any.whl", hash = "sha256:e598fe171c3dde2dfd0f001619ee9110eece66761a677b34719fb18934935ce0", size = 737267, upload-time = "2026-01-24T13:56:58.06Z" },
-]
-
-[[package]]
-name = "pytz"
-version = "2026.1.post1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/56/db/b8721d71d945e6a8ac63c0fc900b2067181dbb50805958d4d4661cf7d277/pytz-2026.1.post1.tar.gz", hash = "sha256:3378dde6a0c3d26719182142c56e60c7f9af7e968076f31aae569d72a0358ee1", size = 321088, upload-time = "2026-03-03T07:47:50.683Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/10/99/781fe0c827be2742bcc775efefccb3b048a3a9c6ce9aec0cbf4a101677e5/pytz-2026.1.post1-py2.py3-none-any.whl", hash = "sha256:f2fd16142fda348286a75e1a524be810bb05d444e5a081f37f7affc635035f7a", size = 510489, upload-time = "2026-03-03T07:47:49.167Z" },
+    { url = "https://files.pythonhosted.org/packages/94/f7/0e2f89dd62f45d46d4ea0d8aec5893ce5b37389638db010c117f46f11450/python_telegram_bot-22.7-py3-none-any.whl", hash = "sha256:d72eed532cf763758cd9331b57a6d790aff0bb4d37d8f4e92149436fe21c6475", size = 745365, upload-time = "2026-03-16T09:36:01.498Z" },
 ]

 [[package]]
@@ -3273,85 +3660,95 @@ wheels = [

 [[package]]
 name = "regex"
-version = "2025.11.3"
+version = "2026.4.4"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669, upload-time = "2025-11-03T21:34:22.089Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/3a246dbf05666918bd3664d9d787f84a9108f6f43cc953a077e4a7dfdb7e/regex-2026.4.4.tar.gz", hash = "sha256:e08270659717f6973523ce3afbafa53515c4dc5dcad637dc215b6fd50f689423", size = 416000, upload-time = "2026-04-03T20:56:28.155Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/e8/74/18f04cb53e58e3fb107439699bd8375cf5a835eec81084e0bddbd122e4c2/regex-2025.11.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bc8ab71e2e31b16e40868a40a69007bc305e1109bd4658eb6cad007e0bf67c41", size = 489312, upload-time = "2025-11-03T21:31:34.343Z" },
-    { url = "https://files.pythonhosted.org/packages/78/3f/37fcdd0d2b1e78909108a876580485ea37c91e1acf66d3bb8e736348f441/regex-2025.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:22b29dda7e1f7062a52359fca6e58e548e28c6686f205e780b02ad8ef710de36", size = 291256, upload-time = "2025-11-03T21:31:35.675Z" },
-    { url = "https://files.pythonhosted.org/packages/bf/26/0a575f58eb23b7ebd67a45fccbc02ac030b737b896b7e7a909ffe43ffd6a/regex-2025.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a91e4a29938bc1a082cc28fdea44be420bf2bebe2665343029723892eb073e1", size = 288921, upload-time = "2025-11-03T21:31:37.07Z" },
-    { url = "https://files.pythonhosted.org/packages/ea/98/6a8dff667d1af907150432cf5abc05a17ccd32c72a3615410d5365ac167a/regex-2025.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08b884f4226602ad40c5d55f52bf91a9df30f513864e0054bad40c0e9cf1afb7", size = 798568, upload-time = "2025-11-03T21:31:38.784Z" },
-    { url = "https://files.pythonhosted.org/packages/64/15/92c1db4fa4e12733dd5a526c2dd2b6edcbfe13257e135fc0f6c57f34c173/regex-2025.11.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e0b11b2b2433d1c39c7c7a30e3f3d0aeeea44c2a8d0bae28f6b95f639927a69", size = 864165, upload-time = "2025-11-03T21:31:40.559Z" },
-    { url = "https://files.pythonhosted.org/packages/f9/e7/3ad7da8cdee1ce66c7cd37ab5ab05c463a86ffeb52b1a25fe7bd9293b36c/regex-2025.11.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87eb52a81ef58c7ba4d45c3ca74e12aa4b4e77816f72ca25258a85b3ea96cb48", size = 912182, upload-time = "2025-11-03T21:31:42.002Z" },
-    { url = "https://files.pythonhosted.org/packages/84/bd/9ce9f629fcb714ffc2c3faf62b6766ecb7a585e1e885eb699bcf130a5209/regex-2025.11.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a12ab1f5c29b4e93db518f5e3872116b7e9b1646c9f9f426f777b50d44a09e8c", size = 803501, upload-time = "2025-11-03T21:31:43.815Z" },
-    { url = "https://files.pythonhosted.org/packages/7c/0f/8dc2e4349d8e877283e6edd6c12bdcebc20f03744e86f197ab6e4492bf08/regex-2025.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7521684c8c7c4f6e88e35ec89680ee1aa8358d3f09d27dfbdf62c446f5d4c695", size = 787842, upload-time = "2025-11-03T21:31:45.353Z" },
-    { url = "https://files.pythonhosted.org/packages/f9/73/cff02702960bc185164d5619c0c62a2f598a6abff6695d391b096237d4ab/regex-2025.11.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7fe6e5440584e94cc4b3f5f4d98a25e29ca12dccf8873679a635638349831b98", size = 858519, upload-time = "2025-11-03T21:31:46.814Z" },
-    { url = "https://files.pythonhosted.org/packages/61/83/0e8d1ae71e15bc1dc36231c90b46ee35f9d52fab2e226b0e039e7ea9c10a/regex-2025.11.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8e026094aa12b43f4fd74576714e987803a315c76edb6b098b9809db5de58f74", size = 850611, upload-time = "2025-11-03T21:31:48.289Z" },
-    { url = "https://files.pythonhosted.org/packages/c8/f5/70a5cdd781dcfaa12556f2955bf170cd603cb1c96a1827479f8faea2df97/regex-2025.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:435bbad13e57eb5606a68443af62bed3556de2f46deb9f7d4237bc2f1c9fb3a0", size = 789759, upload-time = "2025-11-03T21:31:49.759Z" },
-    { url = "https://files.pythonhosted.org/packages/59/9b/7c29be7903c318488983e7d97abcf8ebd3830e4c956c4c540005fcfb0462/regex-2025.11.3-cp312-cp312-win32.whl", hash = "sha256:3839967cf4dc4b985e1570fd8d91078f0c519f30491c60f9ac42a8db039be204", size = 266194, upload-time = "2025-11-03T21:31:51.53Z" },
-    { url = "https://files.pythonhosted.org/packages/1a/67/3b92df89f179d7c367be654ab5626ae311cb28f7d5c237b6bb976cd5fbbb/regex-2025.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:e721d1b46e25c481dc5ded6f4b3f66c897c58d2e8cfdf77bbced84339108b0b9", size = 277069, upload-time = "2025-11-03T21:31:53.151Z" },
-    { url = "https://files.pythonhosted.org/packages/d7/55/85ba4c066fe5094d35b249c3ce8df0ba623cfd35afb22d6764f23a52a1c5/regex-2025.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:64350685ff08b1d3a6fff33f45a9ca183dc1d58bbfe4981604e70ec9801bbc26", size = 270330, upload-time = "2025-11-03T21:31:54.514Z" },
-    { url = "https://files.pythonhosted.org/packages/e1/a7/dda24ebd49da46a197436ad96378f17df30ceb40e52e859fc42cac45b850/regex-2025.11.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c1e448051717a334891f2b9a620fe36776ebf3dd8ec46a0b877c8ae69575feb4", size = 489081, upload-time = "2025-11-03T21:31:55.9Z" },
-    { url = "https://files.pythonhosted.org/packages/19/22/af2dc751aacf88089836aa088a1a11c4f21a04707eb1b0478e8e8fb32847/regex-2025.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9b5aca4d5dfd7fbfbfbdaf44850fcc7709a01146a797536a8f84952e940cca76", size = 291123, upload-time = "2025-11-03T21:31:57.758Z" },
-    { url = "https://files.pythonhosted.org/packages/a3/88/1a3ea5672f4b0a84802ee9891b86743438e7c04eb0b8f8c4e16a42375327/regex-2025.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:04d2765516395cf7dda331a244a3282c0f5ae96075f728629287dfa6f76ba70a", size = 288814, upload-time = "2025-11-03T21:32:01.12Z" },
-    { url = "https://files.pythonhosted.org/packages/fb/8c/f5987895bf42b8ddeea1b315c9fedcfe07cadee28b9c98cf50d00adcb14d/regex-2025.11.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d9903ca42bfeec4cebedba8022a7c97ad2aab22e09573ce9976ba01b65e4361", size = 798592, upload-time = "2025-11-03T21:32:03.006Z" },
-    { url = "https://files.pythonhosted.org/packages/99/2a/6591ebeede78203fa77ee46a1c36649e02df9eaa77a033d1ccdf2fcd5d4e/regex-2025.11.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:639431bdc89d6429f6721625e8129413980ccd62e9d3f496be618a41d205f160", size = 864122, upload-time = "2025-11-03T21:32:04.553Z" },
-    { url = "https://files.pythonhosted.org/packages/94/d6/be32a87cf28cf8ed064ff281cfbd49aefd90242a83e4b08b5a86b38e8eb4/regex-2025.11.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f117efad42068f9715677c8523ed2be1518116d1c49b1dd17987716695181efe", size = 912272, upload-time = "2025-11-03T21:32:06.148Z" },
-    { url = "https://files.pythonhosted.org/packages/62/11/9bcef2d1445665b180ac7f230406ad80671f0fc2a6ffb93493b5dd8cd64c/regex-2025.11.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4aecb6f461316adf9f1f0f6a4a1a3d79e045f9b71ec76055a791affa3b285850", size = 803497, upload-time = "2025-11-03T21:32:08.162Z" },
-    { url = "https://files.pythonhosted.org/packages/e5/a7/da0dc273d57f560399aa16d8a68ae7f9b57679476fc7ace46501d455fe84/regex-2025.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3b3a5f320136873cc5561098dfab677eea139521cb9a9e8db98b7e64aef44cbc", size = 787892, upload-time = "2025-11-03T21:32:09.769Z" },
-    { url = "https://files.pythonhosted.org/packages/da/4b/732a0c5a9736a0b8d6d720d4945a2f1e6f38f87f48f3173559f53e8d5d82/regex-2025.11.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:75fa6f0056e7efb1f42a1c34e58be24072cb9e61a601340cc1196ae92326a4f9", size = 858462, upload-time = "2025-11-03T21:32:11.769Z" },
-    { url = "https://files.pythonhosted.org/packages/0c/f5/a2a03df27dc4c2d0c769220f5110ba8c4084b0bfa9ab0f9b4fcfa3d2b0fc/regex-2025.11.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:dbe6095001465294f13f1adcd3311e50dd84e5a71525f20a10bd16689c61ce0b", size = 850528, upload-time = "2025-11-03T21:32:13.906Z" },
-    { url = "https://files.pythonhosted.org/packages/d6/09/e1cd5bee3841c7f6eb37d95ca91cdee7100b8f88b81e41c2ef426910891a/regex-2025.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:454d9b4ae7881afbc25015b8627c16d88a597479b9dea82b8c6e7e2e07240dc7", size = 789866, upload-time = "2025-11-03T21:32:15.748Z" },
-    { url = "https://files.pythonhosted.org/packages/eb/51/702f5ea74e2a9c13d855a6a85b7f80c30f9e72a95493260193c07f3f8d74/regex-2025.11.3-cp313-cp313-win32.whl", hash = "sha256:28ba4d69171fc6e9896337d4fc63a43660002b7da53fc15ac992abcf3410917c", size = 266189, upload-time = "2025-11-03T21:32:17.493Z" },
-    { url = "https://files.pythonhosted.org/packages/8b/00/6e29bb314e271a743170e53649db0fdb8e8ff0b64b4f425f5602f4eb9014/regex-2025.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:bac4200befe50c670c405dc33af26dad5a3b6b255dd6c000d92fe4629f9ed6a5", size = 277054, upload-time = "2025-11-03T21:32:19.042Z" },
-    { url = "https://files.pythonhosted.org/packages/25/f1/b156ff9f2ec9ac441710764dda95e4edaf5f36aca48246d1eea3f1fd96ec/regex-2025.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:2292cd5a90dab247f9abe892ac584cb24f0f54680c73fcb4a7493c66c2bf2467", size = 270325, upload-time = "2025-11-03T21:32:21.338Z" },
-    { url = "https://files.pythonhosted.org/packages/20/28/fd0c63357caefe5680b8ea052131acbd7f456893b69cc2a90cc3e0dc90d4/regex-2025.11.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:1eb1ebf6822b756c723e09f5186473d93236c06c579d2cc0671a722d2ab14281", size = 491984, upload-time = "2025-11-03T21:32:23.466Z" },
-    { url = "https://files.pythonhosted.org/packages/df/ec/7014c15626ab46b902b3bcc4b28a7bae46d8f281fc7ea9c95e22fcaaa917/regex-2025.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1e00ec2970aab10dc5db34af535f21fcf32b4a31d99e34963419636e2f85ae39", size = 292673, upload-time = "2025-11-03T21:32:25.034Z" },
-    { url = "https://files.pythonhosted.org/packages/23/ab/3b952ff7239f20d05f1f99e9e20188513905f218c81d52fb5e78d2bf7634/regex-2025.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a4cb042b615245d5ff9b3794f56be4138b5adc35a4166014d31d1814744148c7", size = 291029, upload-time = "2025-11-03T21:32:26.528Z" },
-    { url = "https://files.pythonhosted.org/packages/21/7e/3dc2749fc684f455f162dcafb8a187b559e2614f3826877d3844a131f37b/regex-2025.11.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44f264d4bf02f3176467d90b294d59bf1db9fe53c141ff772f27a8b456b2a9ed", size = 807437, upload-time = "2025-11-03T21:32:28.363Z" },
-    { url = "https://files.pythonhosted.org/packages/1b/0b/d529a85ab349c6a25d1ca783235b6e3eedf187247eab536797021f7126c6/regex-2025.11.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7be0277469bf3bd7a34a9c57c1b6a724532a0d235cd0dc4e7f4316f982c28b19", size = 873368, upload-time = "2025-11-03T21:32:30.4Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/18/2d868155f8c9e3e9d8f9e10c64e9a9f496bb8f7e037a88a8bed26b435af6/regex-2025.11.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0d31e08426ff4b5b650f68839f5af51a92a5b51abd8554a60c2fbc7c71f25d0b", size = 914921, upload-time = "2025-11-03T21:32:32.123Z" },
-    { url = "https://files.pythonhosted.org/packages/2d/71/9d72ff0f354fa783fe2ba913c8734c3b433b86406117a8db4ea2bf1c7a2f/regex-2025.11.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e43586ce5bd28f9f285a6e729466841368c4a0353f6fd08d4ce4630843d3648a", size = 812708, upload-time = "2025-11-03T21:32:34.305Z" },
-    { url = "https://files.pythonhosted.org/packages/e7/19/ce4bf7f5575c97f82b6e804ffb5c4e940c62609ab2a0d9538d47a7fdf7d4/regex-2025.11.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0f9397d561a4c16829d4e6ff75202c1c08b68a3bdbfe29dbfcdb31c9830907c6", size = 795472, upload-time = "2025-11-03T21:32:36.364Z" },
-    { url = "https://files.pythonhosted.org/packages/03/86/fd1063a176ffb7b2315f9a1b08d17b18118b28d9df163132615b835a26ee/regex-2025.11.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:dd16e78eb18ffdb25ee33a0682d17912e8cc8a770e885aeee95020046128f1ce", size = 868341, upload-time = "2025-11-03T21:32:38.042Z" },
-    { url = "https://files.pythonhosted.org/packages/12/43/103fb2e9811205e7386366501bc866a164a0430c79dd59eac886a2822950/regex-2025.11.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:ffcca5b9efe948ba0661e9df0fa50d2bc4b097c70b9810212d6b62f05d83b2dd", size = 854666, upload-time = "2025-11-03T21:32:40.079Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/22/e392e53f3869b75804762c7c848bd2dd2abf2b70fb0e526f58724638bd35/regex-2025.11.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c56b4d162ca2b43318ac671c65bd4d563e841a694ac70e1a976ac38fcf4ca1d2", size = 799473, upload-time = "2025-11-03T21:32:42.148Z" },
-    { url = "https://files.pythonhosted.org/packages/4f/f9/8bd6b656592f925b6845fcbb4d57603a3ac2fb2373344ffa1ed70aa6820a/regex-2025.11.3-cp313-cp313t-win32.whl", hash = "sha256:9ddc42e68114e161e51e272f667d640f97e84a2b9ef14b7477c53aac20c2d59a", size = 268792, upload-time = "2025-11-03T21:32:44.13Z" },
-    { url = "https://files.pythonhosted.org/packages/e5/87/0e7d603467775ff65cd2aeabf1b5b50cc1c3708556a8b849a2fa4dd1542b/regex-2025.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7a7c7fdf755032ffdd72c77e3d8096bdcb0eb92e89e17571a196f03d88b11b3c", size = 280214, upload-time = "2025-11-03T21:32:45.853Z" },
-    { url = "https://files.pythonhosted.org/packages/8d/d0/2afc6f8e94e2b64bfb738a7c2b6387ac1699f09f032d363ed9447fd2bb57/regex-2025.11.3-cp313-cp313t-win_arm64.whl", hash = "sha256:df9eb838c44f570283712e7cff14c16329a9f0fb19ca492d21d4b7528ee6821e", size = 271469, upload-time = "2025-11-03T21:32:48.026Z" },
-    { url = "https://files.pythonhosted.org/packages/31/e9/f6e13de7e0983837f7b6d238ad9458800a874bf37c264f7923e63409944c/regex-2025.11.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9697a52e57576c83139d7c6f213d64485d3df5bf84807c35fa409e6c970801c6", size = 489089, upload-time = "2025-11-03T21:32:50.027Z" },
-    { url = "https://files.pythonhosted.org/packages/a3/5c/261f4a262f1fa65141c1b74b255988bd2fa020cc599e53b080667d591cfc/regex-2025.11.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e18bc3f73bd41243c9b38a6d9f2366cd0e0137a9aebe2d8ff76c5b67d4c0a3f4", size = 291059, upload-time = "2025-11-03T21:32:51.682Z" },
-    { url = "https://files.pythonhosted.org/packages/8e/57/f14eeb7f072b0e9a5a090d1712741fd8f214ec193dba773cf5410108bb7d/regex-2025.11.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:61a08bcb0ec14ff4e0ed2044aad948d0659604f824cbd50b55e30b0ec6f09c73", size = 288900, upload-time = "2025-11-03T21:32:53.569Z" },
-    { url = "https://files.pythonhosted.org/packages/3c/6b/1d650c45e99a9b327586739d926a1cd4e94666b1bd4af90428b36af66dc7/regex-2025.11.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9c30003b9347c24bcc210958c5d167b9e4f9be786cb380a7d32f14f9b84674f", size = 799010, upload-time = "2025-11-03T21:32:55.222Z" },
-    { url = "https://files.pythonhosted.org/packages/99/ee/d66dcbc6b628ce4e3f7f0cbbb84603aa2fc0ffc878babc857726b8aab2e9/regex-2025.11.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4e1e592789704459900728d88d41a46fe3969b82ab62945560a31732ffc19a6d", size = 864893, upload-time = "2025-11-03T21:32:57.239Z" },
-    { url = "https://files.pythonhosted.org/packages/bf/2d/f238229f1caba7ac87a6c4153d79947fb0261415827ae0f77c304260c7d3/regex-2025.11.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6538241f45eb5a25aa575dbba1069ad786f68a4f2773a29a2bd3dd1f9de787be", size = 911522, upload-time = "2025-11-03T21:32:59.274Z" },
-    { url = "https://files.pythonhosted.org/packages/bd/3d/22a4eaba214a917c80e04f6025d26143690f0419511e0116508e24b11c9b/regex-2025.11.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce22519c989bb72a7e6b36a199384c53db7722fe669ba891da75907fe3587db", size = 803272, upload-time = "2025-11-03T21:33:01.393Z" },
-    { url = "https://files.pythonhosted.org/packages/84/b1/03188f634a409353a84b5ef49754b97dbcc0c0f6fd6c8ede505a8960a0a4/regex-2025.11.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:66d559b21d3640203ab9075797a55165d79017520685fb407b9234d72ab63c62", size = 787958, upload-time = "2025-11-03T21:33:03.379Z" },
-    { url = "https://files.pythonhosted.org/packages/99/6a/27d072f7fbf6fadd59c64d210305e1ff865cc3b78b526fd147db768c553b/regex-2025.11.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:669dcfb2e38f9e8c69507bace46f4889e3abbfd9b0c29719202883c0a603598f", size = 859289, upload-time = "2025-11-03T21:33:05.374Z" },
-    { url = "https://files.pythonhosted.org/packages/9a/70/1b3878f648e0b6abe023172dacb02157e685564853cc363d9961bcccde4e/regex-2025.11.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:32f74f35ff0f25a5021373ac61442edcb150731fbaa28286bbc8bb1582c89d02", size = 850026, upload-time = "2025-11-03T21:33:07.131Z" },
-    { url = "https://files.pythonhosted.org/packages/dd/d5/68e25559b526b8baab8e66839304ede68ff6727237a47727d240006bd0ff/regex-2025.11.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e6c7a21dffba883234baefe91bc3388e629779582038f75d2a5be918e250f0ed", size = 789499, upload-time = "2025-11-03T21:33:09.141Z" },
-    { url = "https://files.pythonhosted.org/packages/fc/df/43971264857140a350910d4e33df725e8c94dd9dee8d2e4729fa0d63d49e/regex-2025.11.3-cp314-cp314-win32.whl", hash = "sha256:795ea137b1d809eb6836b43748b12634291c0ed55ad50a7d72d21edf1cd565c4", size = 271604, upload-time = "2025-11-03T21:33:10.9Z" },
-    { url = "https://files.pythonhosted.org/packages/01/6f/9711b57dc6894a55faf80a4c1b5aa4f8649805cb9c7aef46f7d27e2b9206/regex-2025.11.3-cp314-cp314-win_amd64.whl", hash = "sha256:9f95fbaa0ee1610ec0fc6b26668e9917a582ba80c52cc6d9ada15e30aa9ab9ad", size = 280320, upload-time = "2025-11-03T21:33:12.572Z" },
-    { url = "https://files.pythonhosted.org/packages/f1/7e/f6eaa207d4377481f5e1775cdeb5a443b5a59b392d0065f3417d31d80f87/regex-2025.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:dfec44d532be4c07088c3de2876130ff0fbeeacaa89a137decbbb5f665855a0f", size = 273372, upload-time = "2025-11-03T21:33:14.219Z" },
-    { url = "https://files.pythonhosted.org/packages/c3/06/49b198550ee0f5e4184271cee87ba4dfd9692c91ec55289e6282f0f86ccf/regex-2025.11.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ba0d8a5d7f04f73ee7d01d974d47c5834f8a1b0224390e4fe7c12a3a92a78ecc", size = 491985, upload-time = "2025-11-03T21:33:16.555Z" },
-    { url = "https://files.pythonhosted.org/packages/ce/bf/abdafade008f0b1c9da10d934034cb670432d6cf6cbe38bbb53a1cfd6cf8/regex-2025.11.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:442d86cf1cfe4faabf97db7d901ef58347efd004934da045c745e7b5bd57ac49", size = 292669, upload-time = "2025-11-03T21:33:18.32Z" },
-    { url = "https://files.pythonhosted.org/packages/f9/ef/0c357bb8edbd2ad8e273fcb9e1761bc37b8acbc6e1be050bebd6475f19c1/regex-2025.11.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fd0a5e563c756de210bb964789b5abe4f114dacae9104a47e1a649b910361536", size = 291030, upload-time = "2025-11-03T21:33:20.048Z" },
-    { url = "https://files.pythonhosted.org/packages/79/06/edbb67257596649b8fb088d6aeacbcb248ac195714b18a65e018bf4c0b50/regex-2025.11.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf3490bcbb985a1ae97b2ce9ad1c0f06a852d5b19dde9b07bdf25bf224248c95", size = 807674, upload-time = "2025-11-03T21:33:21.797Z" },
-    { url = "https://files.pythonhosted.org/packages/f4/d9/ad4deccfce0ea336296bd087f1a191543bb99ee1c53093dcd4c64d951d00/regex-2025.11.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3809988f0a8b8c9dcc0f92478d6501fac7200b9ec56aecf0ec21f4a2ec4b6009", size = 873451, upload-time = "2025-11-03T21:33:23.741Z" },
-    { url = "https://files.pythonhosted.org/packages/13/75/a55a4724c56ef13e3e04acaab29df26582f6978c000ac9cd6810ad1f341f/regex-2025.11.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f4ff94e58e84aedb9c9fce66d4ef9f27a190285b451420f297c9a09f2b9abee9", size = 914980, upload-time = "2025-11-03T21:33:25.999Z" },
-    { url = "https://files.pythonhosted.org/packages/67/1e/a1657ee15bd9116f70d4a530c736983eed997b361e20ecd8f5ca3759d5c5/regex-2025.11.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eb542fd347ce61e1321b0a6b945d5701528dca0cd9759c2e3bb8bd57e47964d", size = 812852, upload-time = "2025-11-03T21:33:27.852Z" },
-    { url = "https://files.pythonhosted.org/packages/b8/6f/f7516dde5506a588a561d296b2d0044839de06035bb486b326065b4c101e/regex-2025.11.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2d5919075a1f2e413c00b056ea0c2f065b3f5fe83c3d07d325ab92dce51d6", size = 795566, upload-time = "2025-11-03T21:33:32.364Z" },
-    { url = "https://files.pythonhosted.org/packages/d9/dd/3d10b9e170cc16fb34cb2cef91513cf3df65f440b3366030631b2984a264/regex-2025.11.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3f8bf11a4827cc7ce5a53d4ef6cddd5ad25595d3c1435ef08f76825851343154", size = 868463, upload-time = "2025-11-03T21:33:34.459Z" },
-    { url = "https://files.pythonhosted.org/packages/f5/8e/935e6beff1695aa9085ff83195daccd72acc82c81793df480f34569330de/regex-2025.11.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:22c12d837298651e5550ac1d964e4ff57c3f56965fc1812c90c9fb2028eaf267", size = 854694, upload-time = "2025-11-03T21:33:36.793Z" },
-    { url = "https://files.pythonhosted.org/packages/92/12/10650181a040978b2f5720a6a74d44f841371a3d984c2083fc1752e4acf6/regex-2025.11.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:62ba394a3dda9ad41c7c780f60f6e4a70988741415ae96f6d1bf6c239cf01379", size = 799691, upload-time = "2025-11-03T21:33:39.079Z" },
-    { url = "https://files.pythonhosted.org/packages/67/90/8f37138181c9a7690e7e4cb388debbd389342db3c7381d636d2875940752/regex-2025.11.3-cp314-cp314t-win32.whl", hash = "sha256:4bf146dca15cdd53224a1bf46d628bd7590e4a07fbb69e720d561aea43a32b38", size = 274583, upload-time = "2025-11-03T21:33:41.302Z" },
-    { url = "https://files.pythonhosted.org/packages/8f/cd/867f5ec442d56beb56f5f854f40abcfc75e11d10b11fdb1869dd39c63aaf/regex-2025.11.3-cp314-cp314t-win_amd64.whl", hash = "sha256:adad1a1bcf1c9e76346e091d22d23ac54ef28e1365117d99521631078dfec9de", size = 284286, upload-time = "2025-11-03T21:33:43.324Z" },
-    { url = "https://files.pythonhosted.org/packages/20/31/32c0c4610cbc070362bf1d2e4ea86d1ea29014d400a6d6c2486fcfd57766/regex-2025.11.3-cp314-cp314t-win_arm64.whl", hash = "sha256:c54f768482cef41e219720013cd05933b6f971d9562544d691c68699bf2b6801", size = 274741, upload-time = "2025-11-03T21:33:45.557Z" },
+    { url = "https://files.pythonhosted.org/packages/e5/28/b972a4d3df61e1d7bcf1b59fdb3cddef22f88b6be43f161bb41ebc0e4081/regex-2026.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c07ab8794fa929e58d97a0e1796b8b76f70943fa39df225ac9964615cf1f9d52", size = 490434, upload-time = "2026-04-03T20:53:40.219Z" },
+    { url = "https://files.pythonhosted.org/packages/84/20/30041446cf6dc3e0eab344fc62770e84c23b6b68a3b657821f9f80cb69b4/regex-2026.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2c785939dc023a1ce4ec09599c032cc9933d258a998d16ca6f2b596c010940eb", size = 292061, upload-time = "2026-04-03T20:53:41.862Z" },
+    { url = "https://files.pythonhosted.org/packages/62/c8/3baa06d75c98c46d4cc4262b71fd2edb9062b5665e868bca57859dadf93a/regex-2026.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b1ce5c81c9114f1ce2f9288a51a8fd3aeea33a0cc440c415bf02da323aa0a76", size = 289628, upload-time = "2026-04-03T20:53:43.701Z" },
+    { url = "https://files.pythonhosted.org/packages/31/87/3accf55634caad8c0acab23f5135ef7d4a21c39f28c55c816ae012931408/regex-2026.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:760ef21c17d8e6a4fe8cf406a97cf2806a4df93416ccc82fc98d25b1c20425be", size = 796651, upload-time = "2026-04-03T20:53:45.379Z" },
+    { url = "https://files.pythonhosted.org/packages/f6/0c/aaa2c83f34efedbf06f61cb1942c25f6cf1ee3b200f832c4d05f28306c2e/regex-2026.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7088fcdcb604a4417c208e2169715800d28838fefd7455fbe40416231d1d47c1", size = 865916, upload-time = "2026-04-03T20:53:47.064Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/f6/8c6924c865124643e8f37823eca845dc27ac509b2ee58123685e71cd0279/regex-2026.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:07edca1ba687998968f7db5bc355288d0c6505caa7374f013d27356d93976d13", size = 912287, upload-time = "2026-04-03T20:53:49.422Z" },
+    { url = "https://files.pythonhosted.org/packages/11/0e/a9f6f81013e0deaf559b25711623864970fe6a098314e374ccb1540a4152/regex-2026.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:993f657a7c1c6ec51b5e0ba97c9817d06b84ea5fa8d82e43b9405de0defdc2b9", size = 801126, upload-time = "2026-04-03T20:53:51.096Z" },
+    { url = "https://files.pythonhosted.org/packages/71/61/3a0cc8af2dc0c8deb48e644dd2521f173f7e6513c6e195aad9aa8dd77ac5/regex-2026.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:2b69102a743e7569ebee67e634a69c4cb7e59d6fa2e1aa7d3bdbf3f61435f62d", size = 776788, upload-time = "2026-04-03T20:53:52.889Z" },
+    { url = "https://files.pythonhosted.org/packages/64/0b/8bb9cbf21ef7dee58e49b0fdb066a7aded146c823202e16494a36777594f/regex-2026.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dac006c8b6dda72d86ea3d1333d45147de79a3a3f26f10c1cf9287ca4ca0ac3", size = 785184, upload-time = "2026-04-03T20:53:55.627Z" },
+    { url = "https://files.pythonhosted.org/packages/99/c2/d3e80e8137b25ee06c92627de4e4d98b94830e02b3e6f81f3d2e3f504cf5/regex-2026.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:50a766ee2010d504554bfb5f578ed2e066898aa26411d57e6296230627cdefa0", size = 859913, upload-time = "2026-04-03T20:53:57.249Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/e6/9d5d876157d969c804622456ef250017ac7a8f83e0e14f903b9e6df5ce95/regex-2026.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9e2f5217648f68e3028c823df58663587c1507a5ba8419f4fdfc8a461be76043", size = 765732, upload-time = "2026-04-03T20:53:59.428Z" },
+    { url = "https://files.pythonhosted.org/packages/82/80/b568935b4421388561c8ed42aff77247285d3ae3bb2a6ca22af63bae805e/regex-2026.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:39d8de85a08e32632974151ba59c6e9140646dcc36c80423962b1c5c0a92e244", size = 852152, upload-time = "2026-04-03T20:54:01.505Z" },
+    { url = "https://files.pythonhosted.org/packages/39/29/f0f81217e21cd998245da047405366385d5c6072048038a3d33b37a79dc0/regex-2026.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:55d9304e0e7178dfb1e106c33edf834097ddf4a890e2f676f6c5118f84390f73", size = 789076, upload-time = "2026-04-03T20:54:03.323Z" },
+    { url = "https://files.pythonhosted.org/packages/49/1d/1d957a61976ab9d4e767dd4f9d04b66cc0c41c5e36cf40e2d43688b5ae6f/regex-2026.4.4-cp312-cp312-win32.whl", hash = "sha256:04bb679bc0bde8a7bfb71e991493d47314e7b98380b083df2447cda4b6edb60f", size = 266700, upload-time = "2026-04-03T20:54:05.639Z" },
+    { url = "https://files.pythonhosted.org/packages/c5/5c/bf575d396aeb58ea13b06ef2adf624f65b70fafef6950a80fc3da9cae3bc/regex-2026.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:db0ac18435a40a2543dbb3d21e161a6c78e33e8159bd2e009343d224bb03bb1b", size = 277768, upload-time = "2026-04-03T20:54:07.312Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/27/049df16ec6a6828ccd72add3c7f54b4df029669bea8e9817df6fff58be90/regex-2026.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:4ce255cc05c1947a12989c6db801c96461947adb7a59990f1360b5983fab4983", size = 270568, upload-time = "2026-04-03T20:54:09.484Z" },
+    { url = "https://files.pythonhosted.org/packages/9d/83/c4373bc5f31f2cf4b66f9b7c31005bd87fe66f0dce17701f7db4ee79ee29/regex-2026.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:62f5519042c101762509b1d717b45a69c0139d60414b3c604b81328c01bd1943", size = 490273, upload-time = "2026-04-03T20:54:11.202Z" },
+    { url = "https://files.pythonhosted.org/packages/46/f8/fe62afbcc3cf4ad4ac9adeaafd98aa747869ae12d3e8e2ac293d0593c435/regex-2026.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3790ba9fb5dd76715a7afe34dbe603ba03f8820764b1dc929dd08106214ed031", size = 291954, upload-time = "2026-04-03T20:54:13.412Z" },
+    { url = "https://files.pythonhosted.org/packages/5a/92/4712b9fe6a33d232eeb1c189484b80c6c4b8422b90e766e1195d6e758207/regex-2026.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8fae3c6e795d7678963f2170152b0d892cf6aee9ee8afc8c45e6be38d5107fe7", size = 289487, upload-time = "2026-04-03T20:54:15.824Z" },
+    { url = "https://files.pythonhosted.org/packages/88/2c/f83b93f85e01168f1070f045a42d4c937b69fdb8dd7ae82d307253f7e36e/regex-2026.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:298c3ec2d53225b3bf91142eb9691025bab610e0c0c51592dde149db679b3d17", size = 796646, upload-time = "2026-04-03T20:54:18.229Z" },
+    { url = "https://files.pythonhosted.org/packages/df/55/61a2e17bf0c4dc57e11caf8dd11771280d8aaa361785f9e3bc40d653f4a7/regex-2026.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e9638791082eaf5b3ac112c587518ee78e083a11c4b28012d8fe2a0f536dfb17", size = 865904, upload-time = "2026-04-03T20:54:20.019Z" },
+    { url = "https://files.pythonhosted.org/packages/45/32/1ac8ed1b5a346b5993a3d256abe0a0f03b0b73c8cc88d928537368ac65b6/regex-2026.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae3e764bd4c5ff55035dc82a8d49acceb42a5298edf6eb2fc4d328ee5dd7afae", size = 912304, upload-time = "2026-04-03T20:54:22.403Z" },
+    { url = "https://files.pythonhosted.org/packages/26/47/2ee5c613ab546f0eddebf9905d23e07beb933416b1246c2d8791d01979b4/regex-2026.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ffa81f81b80047ba89a3c69ae6a0f78d06f4a42ce5126b0eb2a0a10ad44e0b2e", size = 801126, upload-time = "2026-04-03T20:54:24.308Z" },
+    { url = "https://files.pythonhosted.org/packages/75/cd/41dacd129ca9fd20bd7d02f83e0fad83e034ac8a084ec369c90f55ef37e2/regex-2026.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f56ebf9d70305307a707911b88469213630aba821e77de7d603f9d2f0730687d", size = 776772, upload-time = "2026-04-03T20:54:26.319Z" },
+    { url = "https://files.pythonhosted.org/packages/89/6d/5af0b588174cb5f46041fa7dd64d3fd5cd2fe51f18766703d1edc387f324/regex-2026.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:773d1dfd652bbffb09336abf890bfd64785c7463716bf766d0eb3bc19c8b7f27", size = 785228, upload-time = "2026-04-03T20:54:28.387Z" },
+    { url = "https://files.pythonhosted.org/packages/b7/3b/f5a72b7045bd59575fc33bf1345f156fcfd5a8484aea6ad84b12c5a82114/regex-2026.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d51d20befd5275d092cdffba57ded05f3c436317ee56466c8928ac32d960edaf", size = 860032, upload-time = "2026-04-03T20:54:30.641Z" },
+    { url = "https://files.pythonhosted.org/packages/39/a4/72a317003d6fcd7a573584a85f59f525dfe8f67e355ca74eb6b53d66a5e2/regex-2026.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:0a51cdb3c1e9161154f976cb2bef9894bc063ac82f31b733087ffb8e880137d0", size = 765714, upload-time = "2026-04-03T20:54:32.789Z" },
+    { url = "https://files.pythonhosted.org/packages/25/1e/5672e16f34dbbcb2560cc7e6a2fbb26dfa8b270711e730101da4423d3973/regex-2026.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ae5266a82596114e41fb5302140e9630204c1b5f325c770bec654b95dd54b0aa", size = 852078, upload-time = "2026-04-03T20:54:34.546Z" },
+    { url = "https://files.pythonhosted.org/packages/f7/0d/c813f0af7c6cc7ed7b9558bac2e5120b60ad0fa48f813e4d4bd55446f214/regex-2026.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c882cd92ec68585e9c1cf36c447ec846c0d94edd706fe59e0c198e65822fd23b", size = 789181, upload-time = "2026-04-03T20:54:36.642Z" },
+    { url = "https://files.pythonhosted.org/packages/ea/6d/a344608d1adbd2a95090ddd906cec09a11be0e6517e878d02a5123e0917f/regex-2026.4.4-cp313-cp313-win32.whl", hash = "sha256:05568c4fbf3cb4fa9e28e3af198c40d3237cf6041608a9022285fe567ec3ad62", size = 266690, upload-time = "2026-04-03T20:54:38.343Z" },
+    { url = "https://files.pythonhosted.org/packages/31/07/54049f89b46235ca6f45cd6c88668a7050e77d4a15555e47dd40fde75263/regex-2026.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:3384df51ed52db0bea967e21458ab0a414f67cdddfd94401688274e55147bb81", size = 277733, upload-time = "2026-04-03T20:54:40.11Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/21/61366a8e20f4d43fb597708cac7f0e2baadb491ecc9549b4980b2be27d16/regex-2026.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:acd38177bd2c8e69a411d6521760806042e244d0ef94e2dd03ecdaa8a3c99427", size = 270565, upload-time = "2026-04-03T20:54:41.883Z" },
+    { url = "https://files.pythonhosted.org/packages/f1/1e/3a2b9672433bef02f5d39aa1143ca2c08f311c1d041c464a42be9ae648dc/regex-2026.4.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f94a11a9d05afcfcfa640e096319720a19cc0c9f7768e1a61fceee6a3afc6c7c", size = 494126, upload-time = "2026-04-03T20:54:43.602Z" },
+    { url = "https://files.pythonhosted.org/packages/4e/4b/c132a4f4fe18ad3340d89fcb56235132b69559136036b845be3c073142ed/regex-2026.4.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:36bcb9d6d1307ab629edc553775baada2aefa5c50ccc0215fbfd2afcfff43141", size = 293882, upload-time = "2026-04-03T20:54:45.41Z" },
+    { url = "https://files.pythonhosted.org/packages/f4/5f/eaa38092ce7a023656280f2341dbbd4ad5f05d780a70abba7bb4f4bea54c/regex-2026.4.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:261c015b3e2ed0919157046d768774ecde57f03d8fa4ba78d29793447f70e717", size = 292334, upload-time = "2026-04-03T20:54:47.051Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/f6/dd38146af1392dac33db7074ab331cec23cced3759167735c42c5460a243/regex-2026.4.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c228cf65b4a54583763645dcd73819b3b381ca8b4bb1b349dee1c135f4112c07", size = 811691, upload-time = "2026-04-03T20:54:49.074Z" },
+    { url = "https://files.pythonhosted.org/packages/7a/f0/dc54c2e69f5eeec50601054998ec3690d5344277e782bd717e49867c1d29/regex-2026.4.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dd2630faeb6876fb0c287f664d93ddce4d50cd46c6e88e60378c05c9047e08ca", size = 871227, upload-time = "2026-04-03T20:54:51.035Z" },
+    { url = "https://files.pythonhosted.org/packages/a1/af/cb16bd5dc61621e27df919a4449bbb7e5a1034c34d307e0a706e9cc0f3e3/regex-2026.4.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6a50ab11b7779b849472337191f3a043e27e17f71555f98d0092fa6d73364520", size = 917435, upload-time = "2026-04-03T20:54:52.994Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/71/8b260897f22996b666edd9402861668f45a2ca259f665ac029e6104a2d7d/regex-2026.4.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0734f63afe785138549fbe822a8cfeaccd1bae814c5057cc0ed5b9f2de4fc883", size = 816358, upload-time = "2026-04-03T20:54:54.884Z" },
+    { url = "https://files.pythonhosted.org/packages/1c/60/775f7f72a510ef238254906c2f3d737fc80b16ca85f07d20e318d2eea894/regex-2026.4.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c4ee50606cb1967db7e523224e05f32089101945f859928e65657a2cbb3d278b", size = 785549, upload-time = "2026-04-03T20:54:57.01Z" },
+    { url = "https://files.pythonhosted.org/packages/58/42/34d289b3627c03cf381e44da534a0021664188fa49ba41513da0b4ec6776/regex-2026.4.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6c1818f37be3ca02dcb76d63f2c7aaba4b0dc171b579796c6fbe00148dfec6b1", size = 801364, upload-time = "2026-04-03T20:54:58.981Z" },
+    { url = "https://files.pythonhosted.org/packages/fc/20/f6ecf319b382a8f1ab529e898b222c3f30600fcede7834733c26279e7465/regex-2026.4.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f5bfc2741d150d0be3e4a0401a5c22b06e60acb9aa4daa46d9e79a6dcd0f135b", size = 866221, upload-time = "2026-04-03T20:55:00.88Z" },
+    { url = "https://files.pythonhosted.org/packages/92/6a/9f16d3609d549bd96d7a0b2aee1625d7512ba6a03efc01652149ef88e74d/regex-2026.4.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:504ffa8a03609a087cad81277a629b6ce884b51a24bd388a7980ad61748618ff", size = 772530, upload-time = "2026-04-03T20:55:03.213Z" },
+    { url = "https://files.pythonhosted.org/packages/fa/f6/aa9768bc96a4c361ac96419fbaf2dcdc33970bb813df3ba9b09d5d7b6d96/regex-2026.4.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:70aadc6ff12e4b444586e57fc30771f86253f9f0045b29016b9605b4be5f7dfb", size = 856989, upload-time = "2026-04-03T20:55:05.087Z" },
+    { url = "https://files.pythonhosted.org/packages/4d/b4/c671db3556be2473ae3e4bb7a297c518d281452871501221251ea4ecba57/regex-2026.4.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f4f83781191007b6ef43b03debc35435f10cad9b96e16d147efe84a1d48bdde4", size = 803241, upload-time = "2026-04-03T20:55:07.162Z" },
+    { url = "https://files.pythonhosted.org/packages/2a/5c/83e3b1d89fa4f6e5a1bc97b4abd4a9a97b3c1ac7854164f694f5f0ba98a0/regex-2026.4.4-cp313-cp313t-win32.whl", hash = "sha256:e014a797de43d1847df957c0a2a8e861d1c17547ee08467d1db2c370b7568baa", size = 269921, upload-time = "2026-04-03T20:55:09.62Z" },
+    { url = "https://files.pythonhosted.org/packages/28/07/077c387121f42cdb4d92b1301133c0d93b5709d096d1669ab847dda9fe2e/regex-2026.4.4-cp313-cp313t-win_amd64.whl", hash = "sha256:b15b88b0d52b179712632832c1d6e58e5774f93717849a41096880442da41ab0", size = 281240, upload-time = "2026-04-03T20:55:11.521Z" },
+    { url = "https://files.pythonhosted.org/packages/9d/22/ead4a4abc7c59a4d882662aa292ca02c8b617f30b6e163bc1728879e9353/regex-2026.4.4-cp313-cp313t-win_arm64.whl", hash = "sha256:586b89cdadf7d67bf86ae3342a4dcd2b8d70a832d90c18a0ae955105caf34dbe", size = 272440, upload-time = "2026-04-03T20:55:13.365Z" },
+    { url = "https://files.pythonhosted.org/packages/f0/f5/ed97c2dc47b5fbd4b73c0d7d75f9ebc8eca139f2bbef476bba35f28c0a77/regex-2026.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:2da82d643fa698e5e5210e54af90181603d5853cf469f5eedf9bfc8f59b4b8c7", size = 490343, upload-time = "2026-04-03T20:55:15.241Z" },
+    { url = "https://files.pythonhosted.org/packages/80/e9/de4828a7385ec166d673a5790ad06ac48cdaa98bc0960108dd4b9cc1aef7/regex-2026.4.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:54a1189ad9d9357760557c91103d5e421f0a2dabe68a5cdf9103d0dcf4e00752", size = 291909, upload-time = "2026-04-03T20:55:17.558Z" },
+    { url = "https://files.pythonhosted.org/packages/b4/d6/5cfbfc97f3201a4d24b596a77957e092030dcc4205894bc035cedcfce62f/regex-2026.4.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:76d67d5afb1fe402d10a6403bae668d000441e2ab115191a804287d53b772951", size = 289692, upload-time = "2026-04-03T20:55:20.561Z" },
+    { url = "https://files.pythonhosted.org/packages/8e/ac/f2212d9fd56fe897e36d0110ba30ba2d247bd6410c5bd98499c7e5a1e1f2/regex-2026.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e7cd3e4ee8d80447a83bbc9ab0c8459781fa77087f856c3e740d7763be0df27f", size = 796979, upload-time = "2026-04-03T20:55:22.56Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/e3/a016c12675fbac988a60c7e1c16e67823ff0bc016beb27bd7a001dbdabc6/regex-2026.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e19e18c568d2866d8b6a6dfad823db86193503f90823a8f66689315ba28fbe8", size = 866744, upload-time = "2026-04-03T20:55:24.646Z" },
+    { url = "https://files.pythonhosted.org/packages/af/a4/0b90ca4cf17adc3cb43de80ec71018c37c88ad64987e8d0d481a95ca60b5/regex-2026.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7698a6f38730fd1385d390d1ed07bb13dce39aa616aca6a6d89bea178464b9a4", size = 911613, upload-time = "2026-04-03T20:55:27.033Z" },
+    { url =
"https://files.pythonhosted.org/packages/8e/3b/2b3dac0b82d41ab43aa87c6ecde63d71189d03fe8854b8ca455a315edac3/regex-2026.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:173a66f3651cdb761018078e2d9487f4cf971232c990035ec0eb1cdc6bf929a9", size = 800551, upload-time = "2026-04-03T20:55:29.532Z" }, + { url = "https://files.pythonhosted.org/packages/25/fe/5365eb7aa0e753c4b5957815c321519ecab033c279c60e1b1ae2367fa810/regex-2026.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa7922bbb2cc84fa062d37723f199d4c0cd200245ce269c05db82d904db66b83", size = 776911, upload-time = "2026-04-03T20:55:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b3/7fb0072156bba065e3b778a7bc7b0a6328212be5dd6a86fd207e0c4f2dab/regex-2026.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:59f67cd0a0acaf0e564c20bbd7f767286f23e91e2572c5703bf3e56ea7557edb", size = 785751, upload-time = "2026-04-03T20:55:33.797Z" }, + { url = "https://files.pythonhosted.org/packages/02/1a/9f83677eb699273e56e858f7bd95acdbee376d42f59e8bfca2fd80d79df3/regex-2026.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:475e50f3f73f73614f7cba5524d6de49dee269df00272a1b85e3d19f6d498465", size = 860484, upload-time = "2026-04-03T20:55:35.745Z" }, + { url = "https://files.pythonhosted.org/packages/3b/7a/93937507b61cfcff8b4c5857f1b452852b09f741daa9acae15c971d8554e/regex-2026.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:a1c0c7d67b64d85ac2e1879923bad2f08a08f3004055f2f406ef73c850114bd4", size = 765939, upload-time = "2026-04-03T20:55:37.972Z" }, + { url = "https://files.pythonhosted.org/packages/86/ea/81a7f968a351c6552b1670ead861e2a385be730ee28402233020c67f9e0f/regex-2026.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:1371c2ccbb744d66ee63631cc9ca12aa233d5749972626b68fe1a649dd98e566", size = 851417, upload-time = "2026-04-03T20:55:39.92Z" }, + { url = "https://files.pythonhosted.org/packages/4c/7e/323c18ce4b5b8f44517a36342961a0306e931e499febbd876bb149d900f0/regex-2026.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:59968142787042db793348a3f5b918cf24ced1f23247328530e063f89c128a95", size = 789056, upload-time = "2026-04-03T20:55:42.303Z" }, + { url = "https://files.pythonhosted.org/packages/c0/af/e7510f9b11b1913b0cd44eddb784b2d650b2af6515bfce4cffcc5bfd1d38/regex-2026.4.4-cp314-cp314-win32.whl", hash = "sha256:59efe72d37fd5a91e373e5146f187f921f365f4abc1249a5ab446a60f30dd5f8", size = 272130, upload-time = "2026-04-03T20:55:44.995Z" }, + { url = "https://files.pythonhosted.org/packages/9a/51/57dae534c915e2d3a21490e88836fa2ae79dde3b66255ecc0c0a155d2c10/regex-2026.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:e0aab3ff447845049d676827d2ff714aab4f73f340e155b7de7458cf53baa5a4", size = 280992, upload-time = "2026-04-03T20:55:47.316Z" }, + { url = "https://files.pythonhosted.org/packages/0a/5e/abaf9f4c3792e34edb1434f06717fae2b07888d85cb5cec29f9204931bf8/regex-2026.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:a7a5bb6aa0cf62208bb4fa079b0c756734f8ad0e333b425732e8609bd51ee22f", size = 273563, upload-time = "2026-04-03T20:55:49.273Z" }, + { url = "https://files.pythonhosted.org/packages/ff/06/35da85f9f217b9538b99cbb170738993bcc3b23784322decb77619f11502/regex-2026.4.4-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:97850d0638391bdc7d35dc1c1039974dcb921eaafa8cc935ae4d7f272b1d60b3", size = 494191, upload-time = "2026-04-03T20:55:51.258Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/5b/1bc35f479eef8285c4baf88d8c002023efdeebb7b44a8735b36195486ae7/regex-2026.4.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ee7337f88f2a580679f7bbfe69dc86c043954f9f9c541012f49abc554a962f2e", size = 293877, upload-time = "2026-04-03T20:55:53.214Z" }, + { url = "https://files.pythonhosted.org/packages/39/5b/f53b9ad17480b3ddd14c90da04bfb55ac6894b129e5dea87bcaf7d00e336/regex-2026.4.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7429f4e6192c11d659900c0648ba8776243bf396ab95558b8c51a345afeddde6", size = 292410, upload-time = "2026-04-03T20:55:55.736Z" }, + { url = "https://files.pythonhosted.org/packages/bb/56/52377f59f60a7c51aa4161eecf0b6032c20b461805aca051250da435ffc9/regex-2026.4.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4f10fbd5dd13dcf4265b4cc07d69ca70280742870c97ae10093e3d66000359", size = 811831, upload-time = "2026-04-03T20:55:57.802Z" }, + { url = "https://files.pythonhosted.org/packages/dd/63/8026310bf066f702a9c361f83a8c9658f3fe4edb349f9c1e5d5273b7c40c/regex-2026.4.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a152560af4f9742b96f3827090f866eeec5becd4765c8e0d3473d9d280e76a5a", size = 871199, upload-time = "2026-04-03T20:56:00.333Z" }, + { url = "https://files.pythonhosted.org/packages/20/9f/a514bbb00a466dbb506d43f187a04047f7be1505f10a9a15615ead5080ee/regex-2026.4.4-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54170b3e95339f415d54651f97df3bff7434a663912f9358237941bbf9143f55", size = 917649, upload-time = "2026-04-03T20:56:02.445Z" }, + { url = "https://files.pythonhosted.org/packages/cb/6b/8399f68dd41a2030218839b9b18360d79b86d22b9fab5ef477c7f23ca67c/regex-2026.4.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:07f190d65f5a72dcb9cf7106bfc3d21e7a49dd2879eda2207b683f32165e4d99", size = 816388, upload-time = "2026-04-03T20:56:04.595Z" }, + { url = "https://files.pythonhosted.org/packages/1e/9c/103963f47c24339a483b05edd568594c2be486188f688c0170fd504b2948/regex-2026.4.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9a2741ce5a29d3c84b0b94261ba630ab459a1b847a0d6beca7d62d188175c790", size = 785746, upload-time = "2026-04-03T20:56:07.13Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ee/7f6054c0dec0cee3463c304405e4ff42e27cff05bf36fcb34be549ab17bd/regex-2026.4.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b26c30df3a28fd9793113dac7385a4deb7294a06c0f760dd2b008bd49a9139bc", size = 801483, upload-time = "2026-04-03T20:56:09.365Z" }, + { url = "https://files.pythonhosted.org/packages/30/c2/51d3d941cf6070dc00c3338ecf138615fc3cce0421c3df6abe97a08af61a/regex-2026.4.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:421439d1bee44b19f4583ccf42670ca464ffb90e9fdc38d37f39d1ddd1e44f1f", size = 866331, upload-time = "2026-04-03T20:56:12.039Z" }, + { url = "https://files.pythonhosted.org/packages/16/e8/76d50dcc122ac33927d939f350eebcfe3dbcbda96913e03433fc36de5e63/regex-2026.4.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:b40379b53ecbc747fd9bdf4a0ea14eb8188ca1bd0f54f78893a39024b28f4863", size = 772673, upload-time = "2026-04-03T20:56:14.558Z" }, + { url = "https://files.pythonhosted.org/packages/a5/6e/5f6bf75e20ea6873d05ba4ec78378c375cbe08cdec571c83fbb01606e563/regex-2026.4.4-cp314-cp314t-musllinux_1_2_s390x.whl", hash = 
"sha256:08c55c13d2eef54f73eeadc33146fb0baaa49e7335eb1aff6ae1324bf0ddbe4a", size = 857146, upload-time = "2026-04-03T20:56:16.663Z" }, + { url = "https://files.pythonhosted.org/packages/0b/33/3c76d9962949e487ebba353a18e89399f292287204ac8f2f4cfc3a51c233/regex-2026.4.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9776b85f510062f5a75ef112afe5f494ef1635607bf1cc220c1391e9ac2f5e81", size = 803463, upload-time = "2026-04-03T20:56:18.923Z" }, + { url = "https://files.pythonhosted.org/packages/19/eb/ef32dcd2cb69b69bc0c3e55205bce94a7def48d495358946bc42186dcccc/regex-2026.4.4-cp314-cp314t-win32.whl", hash = "sha256:385edaebde5db5be103577afc8699fea73a0e36a734ba24870be7ffa61119d74", size = 275709, upload-time = "2026-04-03T20:56:20.996Z" }, + { url = "https://files.pythonhosted.org/packages/a0/86/c291bf740945acbf35ed7dbebf8e2eea2f3f78041f6bd7cdab80cb274dc0/regex-2026.4.4-cp314-cp314t-win_amd64.whl", hash = "sha256:5d354b18839328927832e2fa5f7c95b7a3ccc39e7a681529e1685898e6436d45", size = 285622, upload-time = "2026-04-03T20:56:23.641Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e7/ec846d560ae6a597115153c02ca6138a7877a1748b2072d9521c10a93e58/regex-2026.4.4-cp314-cp314t-win_arm64.whl", hash = "sha256:af0384cb01a33600c49505c27c6c57ab0b27bf84a74e28524c92ca897ebdac9d", size = 275773, upload-time = "2026-04-03T20:56:26.07Z" }, ] [[package]] name = "requests" -version = "2.33.0" +version = "2.33.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -3359,9 +3756,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/34/64/8860370b167a9721e8956ae116825caff829224fbca0ca6e7bf8ddef8430/requests-2.33.0.tar.gz", hash = "sha256:c7ebc5e8b0f21837386ad0e1c8fe8b829fa5f544d8df3b2253bff14ef29d7652", size = 134232, upload-time = "2026-03-25T15:10:41.586Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time = "2026-03-30T16:09:15.531Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/5d/c814546c2333ceea4ba42262d8c4d55763003e767fa169adc693bd524478/requests-2.33.0-py3-none-any.whl", hash = "sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b", size = 65017, upload-time = "2026-03-25T15:10:40.382Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" }, ] [[package]] @@ -3470,51 +3867,38 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" }, ] -[[package]] -name = "rsa" -version = "4.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyasn1" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } -wheels 
= [ - { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, -] - [[package]] name = "ruff" -version = "0.14.11" +version = "0.15.12" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d4/77/9a7fe084d268f8855d493e5031ea03fa0af8cc05887f638bf1c4e3363eb8/ruff-0.14.11.tar.gz", hash = "sha256:f6dc463bfa5c07a59b1ff2c3b9767373e541346ea105503b4c0369c520a66958", size = 5993417, upload-time = "2026-01-08T19:11:58.322Z" } +sdist = { url = "https://files.pythonhosted.org/packages/99/43/3291f1cc9106f4c63bdce7a8d0df5047fe8422a75b091c16b5e9355e0b11/ruff-0.15.12.tar.gz", hash = "sha256:ecea26adb26b4232c0c2ca19ccbc0083a68344180bba2a600605538ce51a40a6", size = 4643852, upload-time = "2026-04-24T18:17:14.305Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/a6/a4c40a5aaa7e331f245d2dc1ac8ece306681f52b636b40ef87c88b9f7afd/ruff-0.14.11-py3-none-linux_armv6l.whl", hash = "sha256:f6ff2d95cbd335841a7217bdfd9c1d2e44eac2c584197ab1385579d55ff8830e", size = 12951208, upload-time = "2026-01-08T19:12:09.218Z" }, - { url = "https://files.pythonhosted.org/packages/5c/5c/360a35cb7204b328b685d3129c08aca24765ff92b5a7efedbdd6c150d555/ruff-0.14.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f6eb5c1c8033680f4172ea9c8d3706c156223010b8b97b05e82c59bdc774ee6", size = 13330075, upload-time = "2026-01-08T19:12:02.549Z" }, - { url = "https://files.pythonhosted.org/packages/1b/9e/0cc2f1be7a7d33cae541824cf3f95b4ff40d03557b575912b5b70273c9ec/ruff-0.14.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f2fc34cc896f90080fca01259f96c566f74069a04b25b6205d55379d12a6855e", size = 12257809, upload-time = "2026-01-08T19:12:00.366Z" }, - { url = "https://files.pythonhosted.org/packages/a7/e5/5faab97c15bb75228d9f74637e775d26ac703cc2b4898564c01ab3637c02/ruff-0.14.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53386375001773ae812b43205d6064dae49ff0968774e6befe16a994fc233caa", size = 12678447, upload-time = "2026-01-08T19:12:13.899Z" }, - { url = "https://files.pythonhosted.org/packages/1b/33/e9767f60a2bef779fb5855cab0af76c488e0ce90f7bb7b8a45c8a2ba4178/ruff-0.14.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a697737dce1ca97a0a55b5ff0434ee7205943d4874d638fe3ae66166ff46edbe", size = 12758560, upload-time = "2026-01-08T19:11:42.55Z" }, - { url = "https://files.pythonhosted.org/packages/eb/84/4c6cf627a21462bb5102f7be2a320b084228ff26e105510cd2255ea868e5/ruff-0.14.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6845ca1da8ab81ab1dce755a32ad13f1db72e7fba27c486d5d90d65e04d17b8f", size = 13599296, upload-time = "2026-01-08T19:11:30.371Z" }, - { url = "https://files.pythonhosted.org/packages/88/e1/92b5ed7ea66d849f6157e695dc23d5d6d982bd6aa8d077895652c38a7cae/ruff-0.14.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e36ce2fd31b54065ec6f76cb08d60159e1b32bdf08507862e32f47e6dde8bcbf", size = 15048981, upload-time = "2026-01-08T19:12:04.742Z" }, - { url = "https://files.pythonhosted.org/packages/61/df/c1bd30992615ac17c2fb64b8a7376ca22c04a70555b5d05b8f717163cf9f/ruff-0.14.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590bcc0e2097ecf74e62a5c10a6b71f008ad82eb97b0a0079e85defe19fe74d9", size = 14633183, upload-time = 
"2026-01-08T19:11:40.069Z" }, - { url = "https://files.pythonhosted.org/packages/04/e9/fe552902f25013dd28a5428a42347d9ad20c4b534834a325a28305747d64/ruff-0.14.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:53fe71125fc158210d57fe4da26e622c9c294022988d08d9347ec1cf782adafe", size = 14050453, upload-time = "2026-01-08T19:11:37.555Z" }, - { url = "https://files.pythonhosted.org/packages/ae/93/f36d89fa021543187f98991609ce6e47e24f35f008dfe1af01379d248a41/ruff-0.14.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a35c9da08562f1598ded8470fcfef2afb5cf881996e6c0a502ceb61f4bc9c8a3", size = 13757889, upload-time = "2026-01-08T19:12:07.094Z" }, - { url = "https://files.pythonhosted.org/packages/b7/9f/c7fb6ecf554f28709a6a1f2a7f74750d400979e8cd47ed29feeaa1bd4db8/ruff-0.14.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0f3727189a52179393ecf92ec7057c2210203e6af2676f08d92140d3e1ee72c1", size = 13955832, upload-time = "2026-01-08T19:11:55.064Z" }, - { url = "https://files.pythonhosted.org/packages/db/a0/153315310f250f76900a98278cf878c64dfb6d044e184491dd3289796734/ruff-0.14.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:eb09f849bd37147a789b85995ff734a6c4a095bed5fd1608c4f56afc3634cde2", size = 12586522, upload-time = "2026-01-08T19:11:35.356Z" }, - { url = "https://files.pythonhosted.org/packages/2f/2b/a73a2b6e6d2df1d74bf2b78098be1572191e54bec0e59e29382d13c3adc5/ruff-0.14.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:c61782543c1231bf71041461c1f28c64b961d457d0f238ac388e2ab173d7ecb7", size = 12724637, upload-time = "2026-01-08T19:11:47.796Z" }, - { url = "https://files.pythonhosted.org/packages/f0/41/09100590320394401cd3c48fc718a8ba71c7ddb1ffd07e0ad6576b3a3df2/ruff-0.14.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:82ff352ea68fb6766140381748e1f67f83c39860b6446966cff48a315c3e2491", size = 13145837, upload-time = "2026-01-08T19:11:32.87Z" }, - { url = "https://files.pythonhosted.org/packages/3b/d8/e035db859d1d3edf909381eb8ff3e89a672d6572e9454093538fe6f164b0/ruff-0.14.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:728e56879df4ca5b62a9dde2dd0eb0edda2a55160c0ea28c4025f18c03f86984", size = 13850469, upload-time = "2026-01-08T19:12:11.694Z" }, - { url = "https://files.pythonhosted.org/packages/4e/02/bb3ff8b6e6d02ce9e3740f4c17dfbbfb55f34c789c139e9cd91985f356c7/ruff-0.14.11-py3-none-win32.whl", hash = "sha256:337c5dd11f16ee52ae217757d9b82a26400be7efac883e9e852646f1557ed841", size = 12851094, upload-time = "2026-01-08T19:11:45.163Z" }, - { url = "https://files.pythonhosted.org/packages/58/f1/90ddc533918d3a2ad628bc3044cdfc094949e6d4b929220c3f0eb8a1c998/ruff-0.14.11-py3-none-win_amd64.whl", hash = "sha256:f981cea63d08456b2c070e64b79cb62f951aa1305282974d4d5216e6e0178ae6", size = 14001379, upload-time = "2026-01-08T19:11:52.591Z" }, - { url = "https://files.pythonhosted.org/packages/c4/1c/1dbe51782c0e1e9cfce1d1004752672d2d4629ea46945d19d731ad772b3b/ruff-0.14.11-py3-none-win_arm64.whl", hash = "sha256:649fb6c9edd7f751db276ef42df1f3df41c38d67d199570ae2a7bd6cbc3590f0", size = 12938644, upload-time = "2026-01-08T19:11:50.027Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6e/e78ffb61d4686f3d96ba3df2c801161843746dcbcbb17a1e927d4829312b/ruff-0.15.12-py3-none-linux_armv6l.whl", hash = "sha256:f86f176e188e94d6bdbc09f09bfd9dc729059ad93d0e7390b5a73efe19f8861c", size = 10640713, upload-time = "2026-04-24T18:17:22.841Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/08/a317bc231fb9e7b93e4ef3089501e51922ff88d6936ce5cf870c4fe55419/ruff-0.15.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e3bcd123364c3770b8e1b7baaf343cc99a35f197c5c6e8af79015c666c423a6c", size = 11069267, upload-time = "2026-04-24T18:17:30.105Z" }, + { url = "https://files.pythonhosted.org/packages/aa/a4/f828e9718d3dce1f5f11c39c4f65afd32783c8b2aebb2e3d259e492c47bd/ruff-0.15.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fe87510d000220aa1ed530d4448a7c696a0cae1213e5ec30e5874287b66557b5", size = 10397182, upload-time = "2026-04-24T18:17:07.177Z" }, + { url = "https://files.pythonhosted.org/packages/71/e0/3310fc6d1b5e1fdea22bf3b1b807c7e187b581021b0d7d4514cccdb5fb71/ruff-0.15.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84a1630093121375a3e2a95b4a6dc7b59e2b4ee76216e32d81aae550a832d002", size = 10758012, upload-time = "2026-04-24T18:16:55.759Z" }, + { url = "https://files.pythonhosted.org/packages/11/c1/a606911aee04c324ddaa883ae418f3569792fd3c4a10c50e0dd0a2311e1e/ruff-0.15.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb129f40f114f089ebe0ca56c0d251cf2061b17651d464bb6478dc01e69f11f5", size = 10447479, upload-time = "2026-04-24T18:16:51.677Z" }, + { url = "https://files.pythonhosted.org/packages/9d/68/4201e8444f0894f21ab4aeeaee68aa4f10b51613514a20d80bd628d57e88/ruff-0.15.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0c862b172d695db7598426b8af465e7e9ac00a3ea2a3630ee67eb82e366aaa6", size = 11234040, upload-time = "2026-04-24T18:17:16.529Z" }, + { url = "https://files.pythonhosted.org/packages/34/ff/8a6d6cf4ccc23fd67060874e832c18919d1557a0611ebef03fdb01fff11e/ruff-0.15.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2849ea9f3484c3aca43a82f484210370319e7170df4dfe4843395ddf6c57bc33", size = 12087377, upload-time = "2026-04-24T18:17:04.944Z" }, + { url = "https://files.pythonhosted.org/packages/85/f6/c669cf73f5152f623d34e69866a46d5e6185816b19fcd5b6dd8a2d299922/ruff-0.15.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e77c7e51c07fe396826d5969a5b846d9cd4c402535835fb6e21ce8b28fef847", size = 11367784, upload-time = "2026-04-24T18:17:25.409Z" }, + { url = "https://files.pythonhosted.org/packages/e8/39/c61d193b8a1daaa8977f7dea9e8d8ba866e02ea7b65d32f6861693aa4c12/ruff-0.15.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b2f4f2f3b1026b5fb449b467d9264bf22067b600f7b6f41fc5958909f449d0", size = 11344088, upload-time = "2026-04-24T18:17:12.258Z" }, + { url = "https://files.pythonhosted.org/packages/c2/8d/49afab3645e31e12c590acb6d3b5b69d7aab5b81926dbaf7461f9441f37a/ruff-0.15.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9ba3b8f1afd7e2e43d8943e55f249e13f9682fde09711644a6e7290eb4f3e339", size = 11271770, upload-time = "2026-04-24T18:17:02.457Z" }, + { url = "https://files.pythonhosted.org/packages/46/06/33f41fe94403e2b755481cdfb9b7ef3e4e0ed031c4581124658d935d52b4/ruff-0.15.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e852ba9fdc890655e1d78f2df1499efbe0e54126bd405362154a75e2bde159c5", size = 10719355, upload-time = "2026-04-24T18:17:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/0d/59/18aa4e014debbf559670e4048e39260a85c7fcee84acfd761ac01e7b8d35/ruff-0.15.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dd8aed930da53780d22fc70bdf84452c843cf64f8cb4eb38984319c24c5cd5fd", size = 10462758, upload-time = "2026-04-24T18:17:32.347Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/e7/cc9f16fd0f3b5fddcbd7ec3d6ae30c8f3fde1047f32a4093a98d633c6570/ruff-0.15.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:01da3988d225628b709493d7dc67c3b9b12c0210016b08690ef9bd27970b262b", size = 10953498, upload-time = "2026-04-24T18:17:20.674Z" }, + { url = "https://files.pythonhosted.org/packages/72/7a/a9ba7f98c7a575978698f4230c5e8cc54bbc761af34f560818f933dafa0c/ruff-0.15.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9cae0f92bd5700d1213188b31cd3bdd2b315361296d10b96b8e2337d3d11f53e", size = 11447765, upload-time = "2026-04-24T18:17:09.755Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f9/0ae446942c846b8266059ad8a30702a35afae55f5cdc54c5adf8d7afdc27/ruff-0.15.12-py3-none-win32.whl", hash = "sha256:d0185894e038d7043ba8fd6aee7499ece6462dc0ea9f1e260c7451807c714c20", size = 10657277, upload-time = "2026-04-24T18:17:18.591Z" }, + { url = "https://files.pythonhosted.org/packages/33/f1/9614e03e1cdcbf9437570b5400ced8a720b5db22b28d8e0f1bda429f660d/ruff-0.15.12-py3-none-win_amd64.whl", hash = "sha256:c87a162d61ab3adca47c03f7f717c68672edec7d1b5499e652331780fe74950d", size = 11837758, upload-time = "2026-04-24T18:17:00.113Z" }, + { url = "https://files.pythonhosted.org/packages/c0/98/6beb4b351e472e5f4c4613f7c35a5290b8be2497e183825310c4c3a3984b/ruff-0.15.12-py3-none-win_arm64.whl", hash = "sha256:a538f7a82d061cee7be55542aca1d86d1393d55d81d4fcc314370f4340930d4f", size = 11120821, upload-time = "2026-04-24T18:16:57.979Z" }, ] [[package]] name = "setuptools" -version = "82.0.0" +version = "82.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/82/f3/748f4d6f65d1756b9ae577f329c951cda23fb900e4de9f70900ced962085/setuptools-82.0.0.tar.gz", hash = "sha256:22e0a2d69474c6ae4feb01951cb69d515ed23728cf96d05513d36e42b62b37cb", size = 1144893, upload-time = "2026-02-08T15:08:40.206Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4f/db/cfac1baf10650ab4d1c111714410d2fbb77ac5a616db26775db562c8fab2/setuptools-82.0.1.tar.gz", hash = "sha256:7d872682c5d01cfde07da7bccc7b65469d3dca203318515ada1de5eda35efbf9", size = 1152316, upload-time = "2026-03-09T12:47:17.221Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/c6/76dc613121b793286a3f91621d7b75a2b493e0390ddca50f11993eadf192/setuptools-82.0.0-py3-none-any.whl", hash = "sha256:70b18734b607bd1da571d097d236cfcfacaf01de45717d59e6e04b96877532e0", size = 1003468, upload-time = "2026-02-08T15:08:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/9d/76/f789f7a86709c6b087c5a2f52f911838cad707cc613162401badc665acfe/setuptools-82.0.1-py3-none-any.whl", hash = "sha256:a59e362652f08dcd477c78bb6e7bd9d80a7995bc73ce773050228a348ce2e5bb", size = 1006223, upload-time = "2026-03-09T12:47:15.026Z" }, ] [[package]] @@ -3528,11 +3912,11 @@ wheels = [ [[package]] name = "slack-sdk" -version = "3.40.1" +version = "3.41.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3a/18/784859b33a3f9c8cdaa1eda4115eb9fe72a0a37304718887d12991eeb2fd/slack_sdk-3.40.1.tar.gz", hash = "sha256:a215333bc251bc90abf5f5110899497bf61a3b5184b6d9ee35d73ebf09ec3fd0", size = 250379, upload-time = "2026-02-18T22:11:01.819Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/35/fc009118a13187dd9731657c60138e5a7c2dea88681a7f04dc406af5da7d/slack_sdk-3.41.0.tar.gz", hash = "sha256:eb61eb12a65bebeca9cb5d36b3f799e836ed2be21b456d15df2627cfe34076ca", size = 250568, upload-time = 
"2026-03-12T16:10:11.381Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/e1/bb81f93c9f403e3b573c429dd4838ec9b44e4ef35f3b0759eb49557ab6e3/slack_sdk-3.40.1-py2.py3-none-any.whl", hash = "sha256:cd8902252979aa248092b0d77f3a9ea3cc605bc5d53663ad728e892e26e14a65", size = 313687, upload-time = "2026-02-18T22:11:00.027Z" }, + { url = "https://files.pythonhosted.org/packages/a1/df/2e4be347ff98281b505cc0ccf141408cdd25eb5ca9f3830deb361b2472d3/slack_sdk-3.41.0-py2.py3-none-any.whl", hash = "sha256:bb18dcdfff1413ec448e759cf807ec3324090993d8ab9111c74081623b692a89", size = 313885, upload-time = "2026-03-12T16:10:09.811Z" }, ] [[package]] @@ -3555,51 +3939,101 @@ wheels = [ [[package]] name = "soupsieve" -version = "2.8.1" +version = "2.8.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/89/23/adf3796d740536d63a6fbda113d07e60c734b6ed5d3058d1e47fc0495e47/soupsieve-2.8.1.tar.gz", hash = "sha256:4cf733bc50fa805f5df4b8ef4740fc0e0fa6218cf3006269afd3f9d6d80fd350", size = 117856, upload-time = "2025-12-18T13:50:34.655Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627, upload-time = "2026-01-20T04:27:02.457Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/48/f3/b67d6ea49ca9154453b6d70b34ea22f3996b9fa55da105a79d8732227adc/soupsieve-2.8.1-py3-none-any.whl", hash = "sha256:a11fe2a6f3d76ab3cf2de04eb339c1be5b506a8a47f2ceb6d139803177f85434", size = 36710, upload-time = "2025-12-18T13:50:33.267Z" }, + { url = "https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" }, ] [[package]] name = "speechrecognition" -version = "3.14.5" +version = "3.16.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "audioop-lts", marker = "python_full_version >= '3.13'" }, { name = "standard-aifc", marker = "python_full_version >= '3.13'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/26/ab/bb1c60e7bfd6b7a736f76439b78ebbfb5e92a81b626b6e94a87e166f2ea4/speechrecognition-3.14.5.tar.gz", hash = "sha256:2d185192986b9b67a1502825a330e971f59a2cae0262f727a19ad1f6b586d00a", size = 32859817, upload-time = "2025-12-31T11:25:46.518Z" } +sdist = { url = "https://files.pythonhosted.org/packages/be/29/5e0c0ec70c749e4f9fd5b175d1b8db1e20c7b55124e8dd6b5e3941231a9d/speechrecognition-3.16.1.tar.gz", hash = "sha256:6e0e5a326825de99c20da129fd5536bdae899faafa137bea905403c7c8dd47ec", size = 32856001, upload-time = "2026-04-24T15:23:52.394Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/a7/903429719d39ac2c42aa37086c90e816d883560f13c87d51f09a2962e021/speechrecognition-3.14.5-py3-none-any.whl", hash = "sha256:0c496d74e9f29b1daadb0d96f5660f47563e42bf09316dacdd57094c5095977e", size = 32856308, upload-time = "2025-12-31T11:25:41.161Z" }, + { url = "https://files.pythonhosted.org/packages/01/9f/3ee184f8543d80e61d53ca588fd32cddc192c947e03c6c4749aab9c9cf2d/speechrecognition-3.16.1-py3-none-any.whl", hash = "sha256:b27ee50422ecee9f6837faeaa2d0937c6ea6d7e1b9dbc00a90ebc0f745cceda9", size = 32853269, upload-time = "2026-04-24T15:23:48.436Z" }, +] + +[[package]] +name = 
"sqlalchemy" +version = "2.0.49" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/45/461788f35e0364a8da7bda51a1fe1b09762d0c32f12f63727998d85a873b/sqlalchemy-2.0.49.tar.gz", hash = "sha256:d15950a57a210e36dd4cec1aac22787e2a4d57ba9318233e2ef8b2daf9ff2d5f", size = 9898221, upload-time = "2026-04-03T16:38:11.704Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/b3/2de412451330756aaaa72d27131db6dde23995efe62c941184e15242a5fa/sqlalchemy-2.0.49-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4bbccb45260e4ff1b7db0be80a9025bb1e6698bdb808b83fff0000f7a90b2c0b", size = 2157681, upload-time = "2026-04-03T16:53:07.132Z" }, + { url = "https://files.pythonhosted.org/packages/50/84/b2a56e2105bd11ebf9f0b93abddd748e1a78d592819099359aa98134a8bf/sqlalchemy-2.0.49-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb37f15714ec2652d574f021d479e78cd4eb9d04396dca36568fdfffb3487982", size = 3338976, upload-time = "2026-04-03T17:07:40Z" }, + { url = "https://files.pythonhosted.org/packages/2c/fa/65fcae2ed62f84ab72cf89536c7c3217a156e71a2c111b1305ab6f0690e2/sqlalchemy-2.0.49-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb9ec6436a820a4c006aad1ac351f12de2f2dbdaad171692ee457a02429b672", size = 3351937, upload-time = "2026-04-03T17:12:23.374Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2f/6fd118563572a7fe475925742eb6b3443b2250e346a0cc27d8d408e73773/sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8d6efc136f44a7e8bc8088507eaabbb8c2b55b3dbb63fe102c690da0ddebe55e", size = 3281646, upload-time = "2026-04-03T17:07:41.949Z" }, + { url = "https://files.pythonhosted.org/packages/c5/d7/410f4a007c65275b9cf82354adb4bb8ba587b176d0a6ee99caa16fe638f8/sqlalchemy-2.0.49-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e06e617e3d4fd9e51d385dfe45b077a41e9d1b033a7702551e3278ac597dc750", size = 3316695, upload-time = "2026-04-03T17:12:25.642Z" }, + { url = "https://files.pythonhosted.org/packages/d9/95/81f594aa60ded13273a844539041ccf1e66c5a7bed0a8e27810a3b52d522/sqlalchemy-2.0.49-cp312-cp312-win32.whl", hash = "sha256:83101a6930332b87653886c01d1ee7e294b1fe46a07dd9a2d2b4f91bcc88eec0", size = 2117483, upload-time = "2026-04-03T17:05:40.896Z" }, + { url = "https://files.pythonhosted.org/packages/47/9e/fd90114059175cac64e4fafa9bf3ac20584384d66de40793ae2e2f26f3bb/sqlalchemy-2.0.49-cp312-cp312-win_amd64.whl", hash = "sha256:618a308215b6cececb6240b9abde545e3acdabac7ae3e1d4e666896bf5ba44b4", size = 2144494, upload-time = "2026-04-03T17:05:42.282Z" }, + { url = "https://files.pythonhosted.org/packages/ae/81/81755f50eb2478eaf2049728491d4ea4f416c1eb013338682173259efa09/sqlalchemy-2.0.49-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df2d441bacf97022e81ad047e1597552eb3f83ca8a8f1a1fdd43cd7fe3898120", size = 2154547, upload-time = "2026-04-03T16:53:08.64Z" }, + { url = "https://files.pythonhosted.org/packages/a2/bc/3494270da80811d08bcfa247404292428c4fe16294932bce5593f215cad9/sqlalchemy-2.0.49-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:8e20e511dc15265fb433571391ba313e10dd8ea7e509d51686a51313b4ac01a2", size = 3280782, upload-time = "2026-04-03T17:07:43.508Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f5/038741f5e747a5f6ea3e72487211579d8cbea5eb9827a9cbd61d0108c4bd/sqlalchemy-2.0.49-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47604cb2159f8bbd5a1ab48a714557156320f20871ee64d550d8bf2683d980d3", size = 3297156, upload-time = "2026-04-03T17:12:27.697Z" }, + { url = "https://files.pythonhosted.org/packages/88/50/a6af0ff9dc954b43a65ca9b5367334e45d99684c90a3d3413fc19a02d43c/sqlalchemy-2.0.49-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:22d8798819f86720bc646ab015baff5ea4c971d68121cb36e2ebc2ee43ead2b7", size = 3228832, upload-time = "2026-04-03T17:07:45.38Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d1/5f6bdad8de0bf546fc74370939621396515e0cdb9067402d6ba1b8afbe9a/sqlalchemy-2.0.49-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9b1c058c171b739e7c330760044803099c7fff11511e3ab3573e5327116a9c33", size = 3267000, upload-time = "2026-04-03T17:12:29.657Z" }, + { url = "https://files.pythonhosted.org/packages/f7/30/ad62227b4a9819a5e1c6abff77c0f614fa7c9326e5a3bdbee90f7139382b/sqlalchemy-2.0.49-cp313-cp313-win32.whl", hash = "sha256:a143af2ea6672f2af3f44ed8f9cd020e9cc34c56f0e8db12019d5d9ecf41cb3b", size = 2115641, upload-time = "2026-04-03T17:05:43.989Z" }, + { url = "https://files.pythonhosted.org/packages/17/3a/7215b1b7d6d49dc9a87211be44562077f5f04f9bb5a59552c1c8e2d98173/sqlalchemy-2.0.49-cp313-cp313-win_amd64.whl", hash = "sha256:12b04d1db2663b421fe072d638a138460a51d5a862403295671c4f3987fb9148", size = 2141498, upload-time = "2026-04-03T17:05:45.7Z" }, + { url = "https://files.pythonhosted.org/packages/28/4b/52a0cb2687a9cd1648252bb257be5a1ba2c2ded20ba695c65756a55a15a4/sqlalchemy-2.0.49-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24bd94bb301ec672d8f0623eba9226cc90d775d25a0c92b5f8e4965d7f3a1518", size = 3560807, upload-time = "2026-04-03T16:58:31.666Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d8/fda95459204877eed0458550d6c7c64c98cc50c2d8d618026737de9ed41a/sqlalchemy-2.0.49-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a51d3db74ba489266ef55c7a4534eb0b8db9a326553df481c11e5d7660c8364d", size = 3527481, upload-time = "2026-04-03T17:06:00.155Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0a/2aac8b78ac6487240cf7afef8f203ca783e8796002dc0cf65c4ee99ff8bb/sqlalchemy-2.0.49-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:55250fe61d6ebfd6934a272ee16ef1244e0f16b7af6cd18ab5b1fc9f08631db0", size = 3468565, upload-time = "2026-04-03T16:58:33.414Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/ce71cfa82c50a373fd2148b3c870be05027155ce791dc9a5dcf439790b8b/sqlalchemy-2.0.49-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:46796877b47034b559a593d7e4b549aba151dae73f9e78212a3478161c12ab08", size = 3477769, upload-time = "2026-04-03T17:06:02.787Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e8/0a9f5c1f7c6f9ca480319bf57c2d7423f08d31445974167a27d14483c948/sqlalchemy-2.0.49-cp313-cp313t-win32.whl", hash = "sha256:9c4969a86e41454f2858256c39bdfb966a20961e9b58bf8749b65abf447e9a8d", size = 2143319, upload-time = "2026-04-03T17:02:04.328Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/51/fb5240729fbec73006e137c4f7a7918ffd583ab08921e6ff81a999d6517a/sqlalchemy-2.0.49-cp313-cp313t-win_amd64.whl", hash = "sha256:b9870d15ef00e4d0559ae10ee5bc71b654d1f20076dbe8bc7ed19b4c0625ceba", size = 2175104, upload-time = "2026-04-03T17:02:05.989Z" }, + { url = "https://files.pythonhosted.org/packages/55/33/bf28f618c0a9597d14e0b9ee7d1e0622faff738d44fe986ee287cdf1b8d0/sqlalchemy-2.0.49-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:233088b4b99ebcbc5258c755a097aa52fbf90727a03a5a80781c4b9c54347a2e", size = 2156356, upload-time = "2026-04-03T16:53:09.914Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a7/5f476227576cb8644650eff68cc35fa837d3802b997465c96b8340ced1e2/sqlalchemy-2.0.49-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57ca426a48eb2c682dae8204cd89ea8ab7031e2675120a47924fabc7caacbc2a", size = 3276486, upload-time = "2026-04-03T17:07:46.9Z" }, + { url = "https://files.pythonhosted.org/packages/2e/84/efc7c0bf3a1c5eef81d397f6fddac855becdbb11cb38ff957888603014a7/sqlalchemy-2.0.49-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:685e93e9c8f399b0c96a624799820176312f5ceef958c0f88215af4013d29066", size = 3281479, upload-time = "2026-04-03T17:12:32.226Z" }, + { url = "https://files.pythonhosted.org/packages/91/68/bb406fa4257099c67bd75f3f2261b129c63204b9155de0d450b37f004698/sqlalchemy-2.0.49-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e0400fa22f79acc334d9a6b185dc00a44a8e6578aa7e12d0ddcd8434152b187", size = 3226269, upload-time = "2026-04-03T17:07:48.678Z" }, + { url = "https://files.pythonhosted.org/packages/67/84/acb56c00cca9f251f437cb49e718e14f7687505749ea9255d7bd8158a6df/sqlalchemy-2.0.49-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a05977bffe9bffd2229f477fa75eabe3192b1b05f408961d1bebff8d1cd4d401", size = 3248260, upload-time = "2026-04-03T17:12:34.381Z" }, + { url = "https://files.pythonhosted.org/packages/56/19/6a20ea25606d1efd7bd1862149bb2a22d1451c3f851d23d887969201633f/sqlalchemy-2.0.49-cp314-cp314-win32.whl", hash = "sha256:0f2fa354ba106eafff2c14b0cc51f22801d1e8b2e4149342023bd6f0955de5f5", size = 2118463, upload-time = "2026-04-03T17:05:47.093Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4f/8297e4ed88e80baa1f5aa3c484a0ee29ef3c69c7582f206c916973b75057/sqlalchemy-2.0.49-cp314-cp314-win_amd64.whl", hash = "sha256:77641d299179c37b89cf2343ca9972c88bb6eef0d5fc504a2f86afd15cd5adf5", size = 2144204, upload-time = "2026-04-03T17:05:48.694Z" }, + { url = "https://files.pythonhosted.org/packages/1f/33/95e7216df810c706e0cd3655a778604bbd319ed4f43333127d465a46862d/sqlalchemy-2.0.49-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c1dc3368794d522f43914e03312202523cc89692f5389c32bea0233924f8d977", size = 3565474, upload-time = "2026-04-03T16:58:35.128Z" }, + { url = "https://files.pythonhosted.org/packages/0c/a4/ed7b18d8ccf7f954a83af6bb73866f5bc6f5636f44c7731fbb741f72cc4f/sqlalchemy-2.0.49-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c821c47ecfe05cc32140dcf8dc6fd5d21971c86dbd56eabfe5ba07a64910c01", size = 3530567, upload-time = "2026-04-03T17:06:04.587Z" }, + { url = "https://files.pythonhosted.org/packages/73/a3/20faa869c7e21a827c4a2a42b41353a54b0f9f5e96df5087629c306df71e/sqlalchemy-2.0.49-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:9c04bff9a5335eb95c6ecf1c117576a0aa560def274876fd156cfe5510fccc61", size = 3474282, upload-time = "2026-04-03T16:58:37.131Z" }, + { url = "https://files.pythonhosted.org/packages/b7/50/276b9a007aa0764304ad467eceb70b04822dc32092492ee5f322d559a4dc/sqlalchemy-2.0.49-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7f605a456948c35260e7b2a39f8952a26f077fd25653c37740ed186b90aaa68a", size = 3480406, upload-time = "2026-04-03T17:06:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e5/c3/c80fcdb41905a2df650c2a3e0337198b6848876e63d66fe9188ef9003d24/sqlalchemy-2.0.49-cp314-cp314t-win32.whl", hash = "sha256:6270d717b11c5476b0cbb21eedc8d4dbb7d1a956fd6c15a23e96f197a6193158", size = 2149151, upload-time = "2026-04-03T17:02:07.281Z" }, + { url = "https://files.pythonhosted.org/packages/05/52/9f1a62feab6ed368aff068524ff414f26a6daebc7361861035ae00b05530/sqlalchemy-2.0.49-cp314-cp314t-win_amd64.whl", hash = "sha256:275424295f4256fd301744b8f335cff367825d270f155d522b30c7bf49903ee7", size = 2184178, upload-time = "2026-04-03T17:02:08.623Z" }, + { url = "https://files.pythonhosted.org/packages/e5/30/8519fdde58a7bdf155b714359791ad1dc018b47d60269d5d160d311fdc36/sqlalchemy-2.0.49-py3-none-any.whl", hash = "sha256:ec44cfa7ef1a728e88ad41674de50f6db8cfdb3e2af84af86e0041aaf02d43d0", size = 1942158, upload-time = "2026-04-03T16:53:44.135Z" }, +] + +[package.optional-dependencies] +asyncio = [ + { name = "greenlet" }, ] [[package]] name = "sqlite-vec" -version = "0.1.6" +version = "0.1.9" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/ed/aabc328f29ee6814033d008ec43e44f2c595447d9cccd5f2aabe60df2933/sqlite_vec-0.1.6-py3-none-macosx_10_6_x86_64.whl", hash = "sha256:77491bcaa6d496f2acb5cc0d0ff0b8964434f141523c121e313f9a7d8088dee3", size = 164075, upload-time = "2024-11-20T16:40:29.847Z" }, - { url = "https://files.pythonhosted.org/packages/a7/57/05604e509a129b22e303758bfa062c19afb020557d5e19b008c64016704e/sqlite_vec-0.1.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fdca35f7ee3243668a055255d4dee4dea7eed5a06da8cad409f89facf4595361", size = 165242, upload-time = "2024-11-20T16:40:31.206Z" }, - { url = "https://files.pythonhosted.org/packages/f2/48/dbb2cc4e5bad88c89c7bb296e2d0a8df58aab9edc75853728c361eefc24f/sqlite_vec-0.1.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b0519d9cd96164cd2e08e8eed225197f9cd2f0be82cb04567692a0a4be02da3", size = 103704, upload-time = "2024-11-20T16:40:33.729Z" }, - { url = "https://files.pythonhosted.org/packages/80/76/97f33b1a2446f6ae55e59b33869bed4eafaf59b7f4c662c8d9491b6a714a/sqlite_vec-0.1.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux1_x86_64.whl", hash = "sha256:823b0493add80d7fe82ab0fe25df7c0703f4752941aee1c7b2b02cec9656cb24", size = 151556, upload-time = "2024-11-20T16:40:35.387Z" }, - { url = "https://files.pythonhosted.org/packages/6a/98/e8bc58b178266eae2fcf4c9c7a8303a8d41164d781b32d71097924a6bebe/sqlite_vec-0.1.6-py3-none-win_amd64.whl", hash = "sha256:c65bcfd90fa2f41f9000052bcb8bb75d38240b2dae49225389eca6c3136d3f0c", size = 281540, upload-time = "2024-11-20T16:40:37.296Z" }, + { url = "https://files.pythonhosted.org/packages/68/85/9fad0045d8e7c8df3e0fa5a56c630e8e15ad6e5ca2e6106fceb666aa6638/sqlite_vec-0.1.9-py3-none-macosx_10_6_x86_64.whl", hash = "sha256:1b62a7f0a060d9475575d4e599bbf94a13d85af896bc1ce86ee80d1b5b48e5fb", size = 131171, upload-time = "2026-03-31T08:02:31.717Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/3d/3677e0cd2f92e5ebc43cd29fbf565b75582bff1ccfa0b8327c7508e1084f/sqlite_vec-0.1.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1d52e30513bae4cc9778ddbf6145610434081be4c3afe57cd877893bad9f6b6c", size = 165434, upload-time = "2026-03-31T08:02:32.712Z" }, + { url = "https://files.pythonhosted.org/packages/00/d4/f2b936d3bdc38eadcbd2a87875815db36430fab0363182ba5d12cd8e0b51/sqlite_vec-0.1.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e921e592f24a5f9a18f590b6ddd530eb637e2d474e3b1972f9bbeb773aa3cb9", size = 160076, upload-time = "2026-03-31T08:02:33.796Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ad/6afd073b0f817b3e03f9e37ad626ae341805891f23c74b5292818f49ac63/sqlite_vec-0.1.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux1_x86_64.whl", hash = "sha256:1515727990b49e79bcaf75fdee2ffc7d461f8b66905013231251f1c8938e7786", size = 163388, upload-time = "2026-03-31T08:02:34.888Z" }, + { url = "https://files.pythonhosted.org/packages/42/89/81b2907cda14e566b9bf215e2ad82fc9b349edf07d2010756ffdb902f328/sqlite_vec-0.1.9-py3-none-win_amd64.whl", hash = "sha256:4a28dc12fa4b53d7b1dced22da2488fade444e96b5d16fd2d698cd670675cf32", size = 292804, upload-time = "2026-03-31T08:02:36.035Z" }, ] [[package]] name = "sse-starlette" -version = "2.1.3" +version = "3.3.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "starlette" }, - { name = "uvicorn" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/fc/56ab9f116b2133521f532fce8d03194cf04dcac25f583cf3d839be4c0496/sse_starlette-2.1.3.tar.gz", hash = "sha256:9cd27eb35319e1414e3d2558ee7414487f9529ce3b3cf9b21434fd110e017169", size = 19678, upload-time = "2024-08-01T08:52:50.248Z" } +sdist = { url = "https://files.pythonhosted.org/packages/26/8c/f9290339ef6d79badbc010f067cd769d6601ec11a57d78569c683fb4dd87/sse_starlette-3.3.4.tar.gz", hash = "sha256:aaf92fc067af8a5427192895ac028e947b484ac01edbc3caf00e7e7137c7bef1", size = 32427, upload-time = "2026-03-29T09:00:23.307Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/52/aa/36b271bc4fa1d2796311ee7c7283a3a1c348bad426d37293609ca4300eef/sse_starlette-2.1.3-py3-none-any.whl", hash = "sha256:8ec846438b4665b9e8c560fcdea6bc8081a3abf7942faa95e5a744999d219772", size = 9383, upload-time = "2024-08-01T08:52:48.659Z" }, + { url = "https://files.pythonhosted.org/packages/f8/7f/3de5402f39890ac5660b86bcf5c03f9d855dad5c4ed764866d7b592b46fd/sse_starlette-3.3.4-py3-none-any.whl", hash = "sha256:84bb06e58939a8b38d8341f1bc9792f06c2b53f48c608dd207582b664fc8f3c1", size = 14330, upload-time = "2026-03-29T09:00:21.846Z" }, ] [[package]] @@ -3626,15 +4060,15 @@ wheels = [ [[package]] name = "starlette" -version = "0.50.0" +version = "1.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985, upload-time = "2025-11-01T15:25:27.516Z" } +sdist = { url = "https://files.pythonhosted.org/packages/81/69/17425771797c36cded50b7fe44e850315d039f28b15901ab44839e70b593/starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149", size = 2655289, upload-time = "2026-03-22T18:29:46.779Z" } 
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" },
+    { url = "https://files.pythonhosted.org/packages/0b/c9/584bc9651441b4ba60cc4d557d8a547b5aff901af35bda3a4ee30c819b82/starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b", size = 72651, upload-time = "2026-03-22T18:29:45.111Z" },
 ]
 
 [[package]]
@@ -3669,25 +4103,25 @@ wheels = [

 [[package]]
 name = "tavily-python"
-version = "0.7.17"
+version = "0.7.23"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "httpx" },
     { name = "requests" },
     { name = "tiktoken" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/5f/eb/d7371ee68119380ab6561c6998eacf3031327ba89c6081d36128ab4a2184/tavily_python-0.7.17.tar.gz", hash = "sha256:437ba064639dfdce1acdbc37cbb73246abe500ab735e988a4b8698a8d5fb7df7", size = 21321, upload-time = "2025-12-17T17:08:39.3Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/89/d1/197419d6133643848514e5e84e8f41886e825b73bf91ae235a1595c964f5/tavily_python-0.7.23.tar.gz", hash = "sha256:3b92232e0e29ab68898b765f281bb4f2c650b02210b64affbc48e15292e96161", size = 25968, upload-time = "2026-03-09T19:17:32.333Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/c7/ce/88565f0c9f7654bc90e19f1e76b3bffee7ff9c1741a2124ec2f2900fb080/tavily_python-0.7.17-py3-none-any.whl", hash = "sha256:a2725b9cba71e404e73d19ff277df916283c10100137c336e07f8e1bd7789fcf", size = 18214, upload-time = "2025-12-17T17:08:38.442Z" },
+    { url = "https://files.pythonhosted.org/packages/64/27/f9c6e9249367be0772fb754849e03cbbc6ad8d80a479bf30ea8811828b2e/tavily_python-0.7.23-py3-none-any.whl", hash = "sha256:52ef85c44b926bce3f257570cd32bc1bd4db54666acf3105617f27411a59e188", size = 19079, upload-time = "2026-03-09T19:17:29.593Z" },
 ]
 
 [[package]]
 name = "tenacity"
-version = "9.1.2"
+version = "9.1.4"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036, upload-time = "2025-04-02T08:25:09.966Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/47/c6/ee486fd809e357697ee8a44d3d69222b344920433d3b6666ccd9b374630c/tenacity-9.1.4.tar.gz", hash = "sha256:adb31d4c263f2bd041081ab33b498309a57c77f9acf2db65aadf0898179cf93a", size = 49413, upload-time = "2026-02-07T10:45:33.841Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" },
+    { url = "https://files.pythonhosted.org/packages/d7/c1/eb8f9debc45d3b7918a32ab756658a0904732f75e555402972246b0b8e71/tenacity-9.1.4-py3-none-any.whl", hash = "sha256:6095a360c919085f28c6527de529e76a06ad89b23659fa881ae0649b867a9d55", size = 28926, upload-time = "2026-02-07T10:45:32.24Z" },
 ]
 
 [[package]]
@@ -3739,14 +4173,14 @@ wheels = [

 [[package]]
 name = "tqdm"
-version = "4.67.1"
+version = "4.67.3"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "colorama", marker = "sys_platform == 'win32'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/09/a9/6ba95a270c6f1fbcd8dac228323f2777d886cb206987444e4bce66338dd4/tqdm-4.67.3.tar.gz", hash = "sha256:7d825f03f89244ef73f1d4ce193cb1774a8179fd96f31d7e1dcde62092b960bb", size = 169598, upload-time = "2026-02-03T17:35:53.048Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" },
+    { url = "https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl", hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf", size = 78374, upload-time = "2026-02-03T17:35:50.982Z" },
 ]
 
 [[package]]
@@ -3781,11 +4215,11 @@ wheels = [

 [[package]]
 name = "tzdata"
-version = "2025.3"
+version = "2026.2"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/ba/19/1b9b0e29f30c6d35cb345486df41110984ea67ae69dddbc0e8a100999493/tzdata-2026.2.tar.gz", hash = "sha256:9173fde7d80d9018e02a662e168e5a2d04f87c41ea174b139fbef642eda62d10", size = 198254, upload-time = "2026-04-24T15:22:08.651Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" },
+    { url = "https://files.pythonhosted.org/packages/ce/e4/dccd7f47c4b64213ac01ef921a1337ee6e30e8c6466046018326977efd95/tzdata-2026.2-py2.py3-none-any.whl", hash = "sha256:bbe9af844f658da81a5f95019480da3a89415801f6cc966806612cc7169bffe7", size = 349321, upload-time = "2026-04-24T15:22:05.876Z" },
 ]
 
 [[package]]
@@ -3799,37 +4233,37 @@ wheels = [

 [[package]]
 name = "uuid-utils"
-version = "0.13.0"
+version = "0.14.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/fe/8a/17b11768dcb473d3a255c02ffdd94fbd1b345c906efea0a39124dcbaed52/uuid_utils-0.13.0.tar.gz", hash = "sha256:4c17df6427a9e23a4cd7fb9ee1efb53b8abb078660b9bdb2524ca8595022dfe1", size = 21921, upload-time = "2026-01-08T15:48:10.841Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/7b/d1/38a573f0c631c062cf42fa1f5d021d4dd3c31fb23e4376e4b56b0c9fbbed/uuid_utils-0.14.1.tar.gz", hash = "sha256:9bfc95f64af80ccf129c604fb6b8ca66c6f256451e32bc4570f760e4309c9b69", size = 22195, upload-time = "2026-02-20T22:50:38.833Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/85/b8/d40848ca22781f206c60a1885fc737d2640392bd6b5792d455525accd89c/uuid_utils-0.13.0-cp39-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:83628283e977fb212e756bc055df8fdd2f9f589a2e539ba1abe755b8ce8df7a4", size = 602130, upload-time = "2026-01-08T15:47:34.877Z" },
-    { url = "https://files.pythonhosted.org/packages/40/b9/00a944b8096632ea12638181f8e294abcde3e3b8b5e29b777f809896f6ae/uuid_utils-0.13.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c47638ed6334ab19d80f73664f153b04bbb04ab8ce4298d10da6a292d4d21c47", size = 304213, upload-time = "2026-01-08T15:47:36.807Z" },
-    { url = "https://files.pythonhosted.org/packages/da/d7/07b36c33aef683b81c9afff3ec178d5eb39d325447a68c3c68a62e4abb32/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:b276b538c57733ed406948584912da422a604313c71479654848b84b9e19c9b0", size = 340624, upload-time = "2026-01-08T15:47:38.821Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/55/fcff2fff02a27866cb1a6614c9df2b3ace721f0a0aab2b7b8f5a7d4e4221/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_armv7l.whl", hash = "sha256:bdaf2b77e34b199cf04cde28399495fd1ed951de214a4ece1f3919b2f945bb06", size = 346705, upload-time = "2026-01-08T15:47:40.397Z" },
-    { url = "https://files.pythonhosted.org/packages/41/48/67438506c2bb8bee1b4b00d7c0b3ff866401b4790849bf591d654d4ea0bc/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_i686.whl", hash = "sha256:eb2f0baf81e82f9769a7684022dca8f3bf801ca1574a3e94df1876e9d6f9271e", size = 366023, upload-time = "2026-01-08T15:47:42.662Z" },
-    { url = "https://files.pythonhosted.org/packages/8b/d7/2d91ce17f62fd764d593430de296b70843cc25229c772453f7261de9e6a8/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_ppc64le.whl", hash = "sha256:6be6c4d11275f5cc402a4fdba6c2b1ce45fd3d99bb78716cd1cc2cbf6802b2ce", size = 471149, upload-time = "2026-01-08T15:47:44.963Z" },
-    { url = "https://files.pythonhosted.org/packages/2e/9a/aa0756186073ba84daf5704c150d41ede10eb3185d510e02532e2071550e/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:77621cf6ceca7f42173a642a01c01c216f9eaec3b7b65d093d2d6a433ca0a83d", size = 342130, upload-time = "2026-01-08T15:47:46.331Z" },
-    { url = "https://files.pythonhosted.org/packages/74/b4/3191789f4dc3bed59d79cec90559821756297a25d7dc34d1bf7781577a75/uuid_utils-0.13.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a5a9eb06c2bb86dd876cd7b2fe927fc8543d14c90d971581db6ffda4a02526f", size = 524128, upload-time = "2026-01-08T15:47:47.628Z" },
-    { url = "https://files.pythonhosted.org/packages/b2/30/29839210a8fff9fc219bfa7c8d8cd115324e92618cba0cda090d54d3d321/uuid_utils-0.13.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:775347c6110fb71360df17aac74132d8d47c1dbe71233ac98197fc872a791fd2", size = 615872, upload-time = "2026-01-08T15:47:50.61Z" },
-    { url = "https://files.pythonhosted.org/packages/99/ed/15000c96a8bd8f5fd8efd622109bf52549ea0b366f8ce71c45580fa55878/uuid_utils-0.13.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:cf95f6370ad1a0910ee7b5ad5228fd19c4ae32fe3627389006adaf519408c41e", size = 581023, upload-time = "2026-01-08T15:47:52.776Z" },
-    { url = "https://files.pythonhosted.org/packages/67/c8/3f809fa2dc2ca4bd331c792a3c7d3e45ae2b709d85847a12b8b27d1d5f19/uuid_utils-0.13.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5a88e23e0b2f4203fefe2ccbca5736ee06fcad10e61b5e7e39c8d7904bc13300", size = 546715, upload-time = "2026-01-08T15:47:54.415Z" },
-    { url = "https://files.pythonhosted.org/packages/f5/80/4f7c7efd734d1494397c781bd3d421688e9c187ae836e3174625b1ddf8b0/uuid_utils-0.13.0-cp39-abi3-win32.whl", hash = "sha256:3e4f2cc54e6a99c0551158100ead528479ad2596847478cbad624977064ffce3", size = 177650, upload-time = "2026-01-08T15:47:55.679Z" },
-    { url = "https://files.pythonhosted.org/packages/6c/94/d05ab68622e66ad787a241dfe5ccc649b3af09f30eae977b9ee8f7046aaa/uuid_utils-0.13.0-cp39-abi3-win_amd64.whl", hash = "sha256:046cb2756e1597b3de22d24851b769913e192135830486a0a70bf41327f0360c", size = 183211, upload-time = "2026-01-08T15:47:57.604Z" },
-    { url = "https://files.pythonhosted.org/packages/69/37/674b3ce25cd715b831ea8ebbd828b74c40159f04c95d1bb963b2c876fe79/uuid_utils-0.13.0-cp39-abi3-win_arm64.whl", hash = "sha256:5447a680df6ef8a5a353976aaf4c97cc3a3a22b1ee13671c44227b921e3ae2a9", size = 183518, upload-time = "2026-01-08T15:47:59.148Z" },
+    { url = "https://files.pythonhosted.org/packages/43/b7/add4363039a34506a58457d96d4aa2126061df3a143eb4d042aedd6a2e76/uuid_utils-0.14.1-cp39-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:93a3b5dc798a54a1feb693f2d1cb4cf08258c32ff05ae4929b5f0a2ca624a4f0", size = 604679, upload-time = "2026-02-20T22:50:27.469Z" },
+    { url = "https://files.pythonhosted.org/packages/dd/84/d1d0bef50d9e66d31b2019997c741b42274d53dde2e001b7a83e9511c339/uuid_utils-0.14.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ccd65a4b8e83af23eae5e56d88034b2fe7264f465d3e830845f10d1591b81741", size = 309346, upload-time = "2026-02-20T22:50:31.857Z" },
+    { url = "https://files.pythonhosted.org/packages/ef/ed/b6d6fd52a6636d7c3eddf97d68da50910bf17cd5ac221992506fb56cf12e/uuid_utils-0.14.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b56b0cacd81583834820588378e432b0696186683b813058b707aedc1e16c4b1", size = 344714, upload-time = "2026-02-20T22:50:42.642Z" },
+    { url = "https://files.pythonhosted.org/packages/a8/a7/a19a1719fb626fe0b31882db36056d44fe904dc0cf15b06fdf56b2679cf7/uuid_utils-0.14.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb3cf14de789097320a3c56bfdfdd51b1225d11d67298afbedee7e84e3837c96", size = 350914, upload-time = "2026-02-20T22:50:36.487Z" },
+    { url = "https://files.pythonhosted.org/packages/1d/fc/f6690e667fdc3bb1a73f57951f97497771c56fe23e3d302d7404be394d4f/uuid_utils-0.14.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60e0854a90d67f4b0cc6e54773deb8be618f4c9bad98d3326f081423b5d14fae", size = 482609, upload-time = "2026-02-20T22:50:37.511Z" },
+    { url = "https://files.pythonhosted.org/packages/54/6e/dcd3fa031320921a12ec7b4672dea3bd1dd90ddffa363a91831ba834d559/uuid_utils-0.14.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6743ba194de3910b5feb1a62590cd2587e33a73ab6af8a01b642ceb5055862", size = 345699, upload-time = "2026-02-20T22:50:46.87Z" },
+    { url = "https://files.pythonhosted.org/packages/04/28/e5220204b58b44ac0047226a9d016a113fde039280cc8732d9e6da43b39f/uuid_utils-0.14.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:043fb58fde6cf1620a6c066382f04f87a8e74feb0f95a585e4ed46f5d44af57b", size = 372205, upload-time = "2026-02-20T22:50:28.438Z" },
+    { url = "https://files.pythonhosted.org/packages/c7/d9/3d2eb98af94b8dfffc82b6a33b4dfc87b0a5de2c68a28f6dde0db1f8681b/uuid_utils-0.14.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c915d53f22945e55fe0d3d3b0b87fd965a57f5fd15666fd92d6593a73b1dd297", size = 521836, upload-time = "2026-02-20T22:50:23.057Z" },
+    { url = "https://files.pythonhosted.org/packages/a8/15/0eb106cc6fe182f7577bc0ab6e2f0a40be247f35c5e297dbf7bbc460bd02/uuid_utils-0.14.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:0972488e3f9b449e83f006ead5a0e0a33ad4a13e4462e865b7c286ab7d7566a3", size
= 625260, upload-time = "2026-02-20T22:50:25.949Z" }, + { url = "https://files.pythonhosted.org/packages/3c/17/f539507091334b109e7496830af2f093d9fc8082411eafd3ece58af1f8ba/uuid_utils-0.14.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:1c238812ae0c8ffe77d8d447a32c6dfd058ea4631246b08b5a71df586ff08531", size = 587824, upload-time = "2026-02-20T22:50:35.225Z" }, + { url = "https://files.pythonhosted.org/packages/2e/c2/d37a7b2e41f153519367d4db01f0526e0d4b06f1a4a87f1c5dfca5d70a8b/uuid_utils-0.14.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:bec8f8ef627af86abf8298e7ec50926627e29b34fa907fcfbedb45aaa72bca43", size = 551407, upload-time = "2026-02-20T22:50:44.915Z" }, + { url = "https://files.pythonhosted.org/packages/65/36/2d24b2cbe78547c6532da33fb8613debd3126eccc33a6374ab788f5e46e9/uuid_utils-0.14.1-cp39-abi3-win32.whl", hash = "sha256:b54d6aa6252d96bac1fdbc80d26ba71bad9f220b2724d692ad2f2310c22ef523", size = 183476, upload-time = "2026-02-20T22:50:32.745Z" }, + { url = "https://files.pythonhosted.org/packages/83/92/2d7e90df8b1a69ec4cff33243ce02b7a62f926ef9e2f0eca5a026889cd73/uuid_utils-0.14.1-cp39-abi3-win_amd64.whl", hash = "sha256:fc27638c2ce267a0ce3e06828aff786f91367f093c80625ee21dad0208e0f5ba", size = 187147, upload-time = "2026-02-20T22:50:45.807Z" }, + { url = "https://files.pythonhosted.org/packages/d9/26/529f4beee17e5248e37e0bc17a2761d34c0fa3b1e5729c88adb2065bae6e/uuid_utils-0.14.1-cp39-abi3-win_arm64.whl", hash = "sha256:b04cb49b42afbc4ff8dbc60cf054930afc479d6f4dd7f1ec3bbe5dbfdde06b7a", size = 188132, upload-time = "2026-02-20T22:50:41.718Z" }, ] [[package]] name = "uvicorn" -version = "0.40.0" +version = "0.46.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/93/041fca8274050e40e6791f267d82e0e2e27dd165627bd640d3e0e378d877/uvicorn-0.46.0.tar.gz", hash = "sha256:fb9da0926999cc6cb22dc7cd71a94a632f078e6ae47ff683c5c420750fb7413d", size = 88758, upload-time = "2026-04-23T07:16:00.151Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" }, + { url = "https://files.pythonhosted.org/packages/31/a3/5b1562db76a5a488274b2332a97199b32d0442aca0ed193697fd47786316/uvicorn-0.46.0-py3-none-any.whl", hash = "sha256:bbebbcbed972d162afca128605223022bedd345b7bc7855ce66deb31487a9048", size = 70926, upload-time = "2026-04-23T07:15:58.355Z" }, ] [package.optional-dependencies] @@ -3877,7 +4311,7 @@ wheels = [ [[package]] name = "volcengine-python-sdk" -version = "5.0.5" +version = "5.0.25" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -3885,9 +4319,9 @@ dependencies = [ { name = "six" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/31/5c/827674e1be2215f4e8205fc876d9a9e0267d9a1be1dbb31fb87213331288/volcengine_python_sdk-5.0.5.tar.gz", hash = "sha256:8c3b674ab5370d93dabb74356f60236418fea785d18e9c4b967390883e87d756", size = 7381857, upload-time = "2026-01-09T13:00:05.997Z" } 
+sdist = { url = "https://files.pythonhosted.org/packages/be/47/b212f7c834155de1e68e8557080a445392a68e41b11cda2957d7837001eb/volcengine_python_sdk-5.0.25.tar.gz", hash = "sha256:c93f86c3638e277a19465d67d64f5222b1b506430f25b153289d6b1b705e9abd", size = 8661219, upload-time = "2026-04-23T12:58:50.569Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/5c/f20856e9d337a9feb7f66a8d3d78a86886054d9fb32ff29a0a4d6ac0d2ed/volcengine_python_sdk-5.0.5-py2.py3-none-any.whl", hash = "sha256:c9b91261386d7f2c1ccfc48169c4b319c58f3c66cc5e492936b5dfb6d25e1a5f", size = 29018827, upload-time = "2026-01-09T13:00:01.827Z" }, + { url = "https://files.pythonhosted.org/packages/65/28/4ab6e1f4241f3bf84aacf0a3777ddd69fe28b662a0c6a56cf932c9064ddc/volcengine_python_sdk-5.0.25-py2.py3-none-any.whl", hash = "sha256:348dbb1cb11e922bebf8360fc61a189e7d6f8be4b8211423ead3a2953eed7755", size = 34196029, upload-time = "2026-04-23T12:58:43.948Z" }, ] [[package]] @@ -3960,6 +4394,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, ] +[[package]] +name = "wcwidth" +version = "0.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/35/a2/8e3becb46433538a38726c948d3399905a4c7cabd0df578ede5dc51f0ec2/wcwidth-0.6.0.tar.gz", hash = "sha256:cdc4e4262d6ef9a1a57e018384cbeb1208d8abbc64176027e2c2455c81313159", size = 159684, upload-time = "2026-02-06T19:19:40.919Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/5a/199c59e0a824a3db2b89c5d2dade7ab5f9624dbf6448dc291b46d5ec94d3/wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad", size = 94189, upload-time = "2026-02-06T19:19:39.646Z" }, +] + [[package]] name = "webencodings" version = "0.5.1" @@ -4108,179 +4551,219 @@ wheels = [ [[package]] name = "xxhash" -version = "3.6.0" +version = "3.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/2f/e183a1b407002f5af81822bee18b61cdb94b8670208ef34734d8d2b8ebe9/xxhash-3.7.0.tar.gz", hash = "sha256:6cc4eefbb542a5d6ffd6d70ea9c502957c925e800f998c5630ecc809d6702bae", size = 82022, upload-time = "2026-04-25T11:10:32.553Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, - { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, - { url = 
"https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, - { url = "https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" }, - { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" }, - { url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" }, - { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" }, - { url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" }, - { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time = "2025-10-02T14:34:46.302Z" }, - { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" }, - { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" }, - { url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" }, - { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 
30617, upload-time = "2025-10-02T14:34:51.954Z" }, - { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" }, - { url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" }, - { url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" }, - { url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" }, - { url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" }, - { url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" }, - { url = "https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = "2025-10-02T14:35:03.61Z" }, - { url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" }, - { url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" }, - { url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" }, - { url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" }, - { url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" }, - { url = "https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" }, - { url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620, upload-time = "2025-10-02T14:35:14.129Z" }, - { url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" }, - { url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" }, - { url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" }, - { url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" }, - { url = "https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" }, - { url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" }, - { url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" }, - { url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" }, - { url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" }, - { url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" }, - { url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, upload-time = "2025-10-02T14:35:29.857Z" }, - { url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" }, - { url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" }, - { url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" }, - { url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" }, - { url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = "2025-10-02T14:35:36.165Z" }, - { url = "https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" }, - { url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 
32754, upload-time = "2025-10-02T14:35:38.245Z" }, - { url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846, upload-time = "2025-10-02T14:35:39.6Z" }, - { url = "https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343, upload-time = "2025-10-02T14:35:40.69Z" }, - { url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074, upload-time = "2025-10-02T14:35:42.29Z" }, - { url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388, upload-time = "2025-10-02T14:35:43.929Z" }, - { url = "https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614, upload-time = "2025-10-02T14:35:45.216Z" }, - { url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024, upload-time = "2025-10-02T14:35:46.959Z" }, - { url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541, upload-time = "2025-10-02T14:35:48.301Z" }, - { url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305, upload-time = "2025-10-02T14:35:49.584Z" }, - { url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848, upload-time = "2025-10-02T14:35:50.877Z" }, - { url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142, upload-time = "2025-10-02T14:35:52.15Z" }, - { url = "https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547, upload-time = "2025-10-02T14:35:53.547Z" }, - { url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = "sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214, upload-time = "2025-10-02T14:35:54.746Z" }, - { url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290, upload-time = "2025-10-02T14:35:55.791Z" }, - { url = "https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795, upload-time = "2025-10-02T14:35:57.162Z" }, - { url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955, upload-time = "2025-10-02T14:35:58.267Z" }, - { url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072, upload-time = "2025-10-02T14:35:59.382Z" }, - { url = "https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579, upload-time = "2025-10-02T14:36:00.838Z" }, - { url = "https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854, upload-time = "2025-10-02T14:36:02.207Z" }, - { url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965, upload-time = "2025-10-02T14:36:03.507Z" }, - { url = "https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484, upload-time = "2025-10-02T14:36:04.828Z" }, - { url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162, upload-time = "2025-10-02T14:36:06.182Z" }, - { url = 
"https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007, upload-time = "2025-10-02T14:36:07.733Z" }, - { url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956, upload-time = "2025-10-02T14:36:09.106Z" }, - { url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401, upload-time = "2025-10-02T14:36:10.585Z" }, - { url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083, upload-time = "2025-10-02T14:36:12.276Z" }, - { url = "https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913, upload-time = "2025-10-02T14:36:14.025Z" }, - { url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" }, - { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" }, - { url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, + { url = "https://files.pythonhosted.org/packages/f2/8a/51a14cdef4728c6c2337db8a7d8704422cc65676d9199d77215464c880af/xxhash-3.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:082c87bfdd2b9f457606c7a4a53457f4c4b48b0cdc48de0277f4349d79bb3d7a", size = 33357, upload-time = "2026-04-25T11:06:20.44Z" }, + { url = "https://files.pythonhosted.org/packages/b9/1b/0c2c933809421ffd9bf42b59315552c143c755db5d9a816b2f1ae273e884/xxhash-3.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5e7ce913b61f35b0c1c839a49ac9c8e75dd8d860150688aed353b0ce1bf409d8", size = 30869, upload-time = "2026-04-25T11:06:21.989Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/89d5fdd6ee12d70ba99451de46dd0e8010167468dcd913ec855653f4dd50/xxhash-3.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3beb1de3b1e9694fcdd853e570ee64c631c7062435d2f8c69c1adf809bc086f0", size = 194100, upload-time = "2026-04-25T11:06:23.586Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/ee/2f9f2ed993e77206d1e66991290a1ebe22e843351ca3ebec8e49e01ba186/xxhash-3.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3e7b689c3bce16699efcf736066f5c6cc4472c3840fe4b22bd8279daf4abdac", size = 212977, upload-time = "2026-04-25T11:06:25.019Z" }, + { url = "https://files.pythonhosted.org/packages/de/60/5a91644615a9e9d4e42c2e9925f1908e3a24e4e691d9de7340d565bea024/xxhash-3.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a6545e6b409e3d5cbafc850fb84c55a1ca26ed15a6b11e3bf07a0e0cd84517c8", size = 236373, upload-time = "2026-04-25T11:06:26.482Z" }, + { url = "https://files.pythonhosted.org/packages/22/c0/f3a9384eaaed9d14d4d062a5d953aa0da489bfe9747877aa994caa87cd0b/xxhash-3.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:31ab1461c77a11461d703c88eb949e132a1c6515933cf675d97ec680f4bd18de", size = 212229, upload-time = "2026-04-25T11:06:28.065Z" }, + { url = "https://files.pythonhosted.org/packages/2e/67/02f07a9fd79726804190f2172c4894c3ed9a4ebccaca05653c84beb58025/xxhash-3.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7c4d596b7676f811172687ec567cbafb9e4dea2f9be1bbb4f622410cb7f40f40", size = 445462, upload-time = "2026-04-25T11:06:30.048Z" }, + { url = "https://files.pythonhosted.org/packages/40/37/558f5a90c0672fc9b4402dc25d87ac5b7406616e8969430c9ca4e52ee74d/xxhash-3.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13805f0461cba0a857924e70ff91ae6d52d2598f79a884e788db80532614a4a1", size = 193932, upload-time = "2026-04-25T11:06:31.857Z" }, + { url = "https://files.pythonhosted.org/packages/d5/90/aaa09cd58661d32044dbbad7df55bbe22a623032b810e7ed3b8c569a2a6f/xxhash-3.7.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1d398f372496152f1c6933a33566373f8d1b37b98b8c9d608fa6edc0976f23b2", size = 284807, upload-time = "2026-04-25T11:06:33.697Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f3/53df3719ab127a02c174f0c1c74924fcd110866e89c966bc7909cfa8fa84/xxhash-3.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d610aa62cdb7d4d497740741772a24a794903bf3e79eaa51d2e800082abe11e5", size = 210445, upload-time = "2026-04-25T11:06:35.488Z" }, + { url = "https://files.pythonhosted.org/packages/72/33/d219975c0e8b6fa2eb9ccd486fe47e21bf1847985b878dd2fbc3126e0d5c/xxhash-3.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:073c23900a9fbf3d26616c17c830db28af9803677cd5b33aea3224d824111514", size = 241273, upload-time = "2026-04-25T11:06:37.24Z" }, + { url = "https://files.pythonhosted.org/packages/3e/50/49b1afe610eb3964cedcb90a4d4c3d46a261ee8669cbd4f060652619ae3c/xxhash-3.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:418a463c3e6a590c0cdc890f8be19adb44a8c8acd175ca5b2a6de77e61d0b386", size = 197950, upload-time = "2026-04-25T11:06:39.148Z" }, + { url = "https://files.pythonhosted.org/packages/c6/75/5f42a1a4c78717d906a4b6a140c6dbf837ab1f547a54d23c4e2903310936/xxhash-3.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:03f8ff4474ee61c845758ce00711d7087a770d77efb36f7e74a6e867301000b8", size = 210709, upload-time = "2026-04-25T11:06:40.958Z" }, + { url = "https://files.pythonhosted.org/packages/8a/85/237e446c25abced71e9c53d269f2cef5bab8a82b3f88a12e00c5368e7368/xxhash-3.7.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = 
"sha256:44fba4a5f1d179b7ddc7b3dc40f56f9209046421679b57025d4d8821b376fd8d", size = 275345, upload-time = "2026-04-25T11:06:42.525Z" }, + { url = "https://files.pythonhosted.org/packages/62/34/c2c26c0a6a9cc739bc2a5f0ae03ba8b87deb12b8bce35f7ac495e790dc6d/xxhash-3.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31e3516a0f829d06ded4a2c0f3c7c5561993256bfa1c493975fb9dc7bfa828a1", size = 414056, upload-time = "2026-04-25T11:06:44.343Z" }, + { url = "https://files.pythonhosted.org/packages/a0/aa/5c58e9bc8071b8afd8dcf297ff362f723c4892168faba149f19904132bf4/xxhash-3.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b59ee2ac81de57771a09ecad09191e840a1d2fae1ef684208320591055768f83", size = 191485, upload-time = "2026-04-25T11:06:46.262Z" }, + { url = "https://files.pythonhosted.org/packages/d4/69/a929cf9d1e2e65a48b818cdce72cb6b69eab2e6877f21436d0a1942aff43/xxhash-3.7.0-cp312-cp312-win32.whl", hash = "sha256:74bbd92f8c7fcc397ba0a11bfdc106bc72ad7f11e3a60277753f87e7532b4d81", size = 30671, upload-time = "2026-04-25T11:06:48.039Z" }, + { url = "https://files.pythonhosted.org/packages/b9/1b/104b41a8947f4e1d4a66ce1e628eea752f37d1890bfd7453559ca7a3d950/xxhash-3.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:7bd7bc82dd4f185f28f35193c2e968ef46131628e3cac62f639dadf321cba4d1", size = 31514, upload-time = "2026-04-25T11:06:49.279Z" }, + { url = "https://files.pythonhosted.org/packages/98/a0/1fd0ea1f1b886d9e7c73f0397571e22333a7d79e31da6d7127c2a4a71d75/xxhash-3.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:7d7148180ec99ba36585b42c8c5de25e9b40191613bc4be68909b4d25a77a852", size = 27761, upload-time = "2026-04-25T11:06:50.448Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ca/d5174b4c36d10f64d4ca7050563138c5a599efb01a765858ddefc9c1202a/xxhash-3.7.0-cp313-cp313-android_21_arm64_v8a.whl", hash = "sha256:4b6d6b33f141158692bd4eafbb96edbc5aa0dabdb593a962db01a91983d4f8fa", size = 36813, upload-time = "2026-04-25T11:06:51.73Z" }, + { url = "https://files.pythonhosted.org/packages/41/d0/abc6c9d347ba1f1e1e1d98125d0881a0452c7f9a76a9dd03a7b5d2197f23/xxhash-3.7.0-cp313-cp313-android_21_x86_64.whl", hash = "sha256:845d347df254d6c619f616afa921331bada8614b8d373d58725c663ba97c3605", size = 35121, upload-time = "2026-04-25T11:06:53.048Z" }, + { url = "https://files.pythonhosted.org/packages/bf/11/4cc834eb3d79f2f2b3a6ef7324195208bcdfbdcf7534d2b17267aa5f3a8f/xxhash-3.7.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:fddbbb69a6fff4f421e7a0d1fa28f894b20112e9e3fab306af451e2dfd0e459b", size = 29624, upload-time = "2026-04-25T11:06:54.311Z" }, + { url = "https://files.pythonhosted.org/packages/23/83/e97d3e7b635fe73a1dfb1e91f805324dd6d930bb42041cbf18f183bc0b6d/xxhash-3.7.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:54876a4e45101cec2bf8f31a973cda073a23e2e108538dad224ba07f85f22487", size = 30638, upload-time = "2026-04-25T11:06:55.864Z" }, + { url = "https://files.pythonhosted.org/packages/f4/40/d84951d80c35db1f4c40a29a64a8520eea5d56e764c603906b4fe763580f/xxhash-3.7.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:0c72fe9c7e3d6dfd7f1e21e224a877917fa09c465694ba4e06464b9511b65544", size = 33323, upload-time = "2026-04-25T11:06:57.336Z" }, + { url = "https://files.pythonhosted.org/packages/89/cc/c7dc6558d97e9ab023f663d69ab28b340ed9bf4d2d94f2c259cf896bb354/xxhash-3.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a6d73a830b17ef49bc04e00182bd839164c1b3c59c127cd7c54fcb10c7ed8ee8", size = 33362, upload-time = "2026-04-25T11:06:58.656Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/6e/46b84017b1301d54091430353d4ad5901654a3e0871649877a416f7f1644/xxhash-3.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:91c3b07cf3362086d8f126c6aecd8e5e9396ad8b2f2219ea7e49a8250c318acd", size = 30874, upload-time = "2026-04-25T11:06:59.834Z" }, + { url = "https://files.pythonhosted.org/packages/df/5e/8f9158e3ab906ad3fec51e09b5ea0093e769f12207bfa42a368ca204e7ab/xxhash-3.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:50e879ebbac351c81565ca108db766d7832f5b8b6a5b14b8c0151f7190028e3d", size = 194185, upload-time = "2026-04-25T11:07:01.658Z" }, + { url = "https://files.pythonhosted.org/packages/f3/29/a804ded9f5d3d3758292678d23e7528b08fda7b7e750688d08b052322475/xxhash-3.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:921c14e93817842dd0dd9f372890a0f0c72e534650b6ab13c5be5cd0db11d47e", size = 213033, upload-time = "2026-04-25T11:07:03.606Z" }, + { url = "https://files.pythonhosted.org/packages/8b/91/1ce5a7d2fdc975267320e2c78fc1cecfe7ab735ccbcf6993ec5dd541cb2c/xxhash-3.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e64a7c9d7dfca3e0fafcbc5e455519090706a3e36e95d655cec3e04e79f95aaa", size = 236140, upload-time = "2026-04-25T11:07:05.396Z" }, + { url = "https://files.pythonhosted.org/packages/34/04/fd595a4fd8617b05fa27bd9b684ecb4985bfed27917848eea85d54036d06/xxhash-3.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2220af08163baf5fa36c2b8af079dc2cbe6e66ae061385267f9472362dfd53c6", size = 212291, upload-time = "2026-04-25T11:07:06.966Z" }, + { url = "https://files.pythonhosted.org/packages/03/fb/f1a379cbc372ae5b9f4ab36154c48a849ca6ebe3ac477067a57865bf3bc6/xxhash-3.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f14bb8b22a4a91325813e3d553b8963c10cf8c756cff65ee50c194431296c655", size = 445532, upload-time = "2026-04-25T11:07:08.525Z" }, + { url = "https://files.pythonhosted.org/packages/65/59/172424b79f8cfd4b6d8a122b2193e6b8ad4b11f7159bb3b6f9b3191329bb/xxhash-3.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:496736f86a9bedaf64b0dc70e3539d0766df01c71ea22032698e88f3f04a1ce9", size = 193990, upload-time = "2026-04-25T11:07:10.315Z" }, + { url = "https://files.pythonhosted.org/packages/b9/19/aeac22161d953f139f07ba5586cb4a17c5b7b6dff985122803bb12933500/xxhash-3.7.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0ff71596bd79816975b3de7130ab1ff4541410285a3c084584eeb1c8239996fd", size = 284876, upload-time = "2026-04-25T11:07:12.15Z" }, + { url = "https://files.pythonhosted.org/packages/77/d5/4fd0b59e7a02242953da05ff679fbb961b0a4368eac97a217e11dae110c1/xxhash-3.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1ad86695c19b1d46fe106925db3c7a37f16be37669dcf58dcc70a9dd6e324676", size = 210495, upload-time = "2026-04-25T11:07:13.952Z" }, + { url = "https://files.pythonhosted.org/packages/aa/fb/976a3165c728c7faf74aa1b5ab3cf6a85e6d731612894741840524c7d28c/xxhash-3.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:970f9f8c50961d639cbd0d988c96f80ddf66006de93641719282c4fe7a87c5e6", size = 241331, upload-time = "2026-04-25T11:07:15.557Z" }, + { url = "https://files.pythonhosted.org/packages/4a/2c/6763d5901d53ac9e6ba296e5717ae599025c9d268396e8faa8b4b0a8e0ac/xxhash-3.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:5886ad85e9e347911783760a1d16cb6b393e8f9e3b52c982568226cb56927bdc", size = 198037, upload-time = "2026-04-25T11:07:17.563Z" }, + { url = "https://files.pythonhosted.org/packages/61/2b/876e722d533833f5f9a83473e6ba993e48745701096944e77bbecf29b2c3/xxhash-3.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6e934bbae1e0ec74e27d5f0d7f37ef547ce5ff9f0a7e63fb39e559fc99526734", size = 210744, upload-time = "2026-04-25T11:07:19.055Z" }, + { url = "https://files.pythonhosted.org/packages/21/e6/d7e7baef7ce24166b4668d3c48557bb35a23b92ecadcac7e7718d099ab69/xxhash-3.7.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:3b6b3d28228af044ebcded71c4a3dd86e1dbd7e2f4645bf40f7b5da65bb5fb5a", size = 275406, upload-time = "2026-04-25T11:07:20.908Z" }, + { url = "https://files.pythonhosted.org/packages/92/fe/198b3763b2e01ca908f2154969a2352ec99bda892b574a11a9a151c5ede4/xxhash-3.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:6be4d70d9ab76c9f324ead9c01af6ff52c324745ea0c3731682a0cf99720f1fe", size = 414125, upload-time = "2026-04-25T11:07:23.037Z" }, + { url = "https://files.pythonhosted.org/packages/3a/6d/019a11affd5a5499137cacca53808659964785439855b5aa40dfd3412916/xxhash-3.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:151d7520838d4465461a0b7f4ae488b3b00de16183dd3214c1a6b14bf89d7fb6", size = 191555, upload-time = "2026-04-25T11:07:24.991Z" }, + { url = "https://files.pythonhosted.org/packages/76/21/b96d58568df2d01533244c3e0e5cbdd0c8b2b25c4bec4d72f19259a292d7/xxhash-3.7.0-cp313-cp313-win32.whl", hash = "sha256:d798c1e291bffb8e37b5bbe0dda77fc767cd19e89cadaf66e6ed5d0ff88c9fe6", size = 30668, upload-time = "2026-04-25T11:07:26.665Z" }, + { url = "https://files.pythonhosted.org/packages/99/57/d849a8d3afa1f8f4bc6a831cd89f49f9706fbbad94d2975d6140a171988c/xxhash-3.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:875811ba23c543b1a1c3143c926e43996eb27ebb8f52d3500744aa608c275aed", size = 31524, upload-time = "2026-04-25T11:07:27.92Z" }, + { url = "https://files.pythonhosted.org/packages/81/52/bacc753e92dee78b058af8dcef0a50815f5f860986c664a92d75f965b6a5/xxhash-3.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:54a675cb300dda83d71daae2a599389d22db8021a0f8db0dd659e14626eb3ecc", size = 27768, upload-time = "2026-04-25T11:07:29.113Z" }, + { url = "https://files.pythonhosted.org/packages/1c/47/ddbd683b7fc7e592c1a8d9d65f73ce9ab513f082b3967eee2baf549b8fc6/xxhash-3.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a3b19a42111c4057c1547a4a1396a53961dca576a0f6b82bfa88a2d1561764b2", size = 33576, upload-time = "2026-04-25T11:07:30.469Z" }, + { url = "https://files.pythonhosted.org/packages/07/f2/36d3310161db7f72efb4562aadde0ed429f1d0531782dd6345b12d2da527/xxhash-3.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8f4608a06e4d61b7a3425665a46d00e0579122e1a2fae97a0c52953a3aad9aa3", size = 31123, upload-time = "2026-04-25T11:07:31.989Z" }, + { url = "https://files.pythonhosted.org/packages/0d/3f/75937a5c69556ed213021e43cbedd84c8e0279d0d74e7d41a255d84ba4b1/xxhash-3.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ad37c7792479e49cf96c1ab25517d7003fe0d93687a772ba19a097d235bbe41e", size = 196491, upload-time = "2026-04-25T11:07:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/22/29/f10d7ff8c7a733d4403a43b9de18c8fabc005f98cec054644f04418659ee/xxhash-3.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc026e3b89d98e30a8288c95cb696e77d150b3f0fb7a51f73dcd49ee6b5577fa", size = 215793, 
upload-time = "2026-04-25T11:07:34.919Z" }, + { url = "https://files.pythonhosted.org/packages/8b/fd/778f60aa295f58907938f030a8b514611f391405614a525cccd2ffc00eb5/xxhash-3.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c9b31ab1f28b078a6a1ac1a54eb35e7d5390deddd56870d0be3a0a733d1c321c", size = 237993, upload-time = "2026-04-25T11:07:36.638Z" }, + { url = "https://files.pythonhosted.org/packages/70/f5/736db5de387b4a540e37a05b84b40dc58a1ce974bfd2b4e5754ce29b68c3/xxhash-3.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3bb5fd680c038fd5229e44e9c493782f90df9bef632fd0499d442374688ff70b", size = 214887, upload-time = "2026-04-25T11:07:38.564Z" }, + { url = "https://files.pythonhosted.org/packages/4d/aa/09a095f22fdb9a27fbb716841fbff52119721f9ca4261952d07a912f7839/xxhash-3.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:030c0fd688fce3569fbb49a2feefd4110cbb0b650186fb4610759ecfac677548", size = 448407, upload-time = "2026-04-25T11:07:40.552Z" }, + { url = "https://files.pythonhosted.org/packages/74/8a/b745efeeca9e34a91c26fdc97ad8514c43d5a81ac78565cba80a1353870a/xxhash-3.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b1bde10324f4c31812ae0d0502e92d916ae8917cad7209353f122b8b8f610c3", size = 196119, upload-time = "2026-04-25T11:07:42.101Z" }, + { url = "https://files.pythonhosted.org/packages/8a/5c/0cfceb024af90c191f665c7933b1f318ee234f4797858383bebd1881d52f/xxhash-3.7.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:503722d52a615f2604f5e7611de7d43878df010dc0053094ef91cb9a9ac3d987", size = 286751, upload-time = "2026-04-25T11:07:43.568Z" }, + { url = "https://files.pythonhosted.org/packages/0b/0a/0793e405dc3cf8f4ebe2c1acec1e4e4608cd9e7e50ea691dabbc2a95ccbb/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c72500a3b6d6c30ebfc135035bcace9eb5884f2dc220804efcaaba43e9f611dd", size = 212961, upload-time = "2026-04-25T11:07:45.388Z" }, + { url = "https://files.pythonhosted.org/packages/0c/7e/721118ffc63bfff94aa565bcf2555a820f9f4bdb0f001e0d609bdfad70de/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:43475925a766d01ca8cd9a857fd87f3d50406983c8506a4c07c4df12adcc867f", size = 243703, upload-time = "2026-04-25T11:07:47.053Z" }, + { url = "https://files.pythonhosted.org/packages/6e/18/16f6267160488b8276fd3d449d425712512add292ba545c1b6946bfdb7dd/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8d09dfd2ab135b985daf868b594315ebe11ad86cd9fea46e6c69f19b28f7d25a", size = 200894, upload-time = "2026-04-25T11:07:48.657Z" }, + { url = "https://files.pythonhosted.org/packages/2d/94/80ba841287fd97e3e9cac1d228788c8ef623746f570404961eec748ecb5c/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c50269d0055ac1faecfd559886d2cbe4b730de236585aba0e873f9d9dadbe585", size = 213357, upload-time = "2026-04-25T11:07:50.257Z" }, + { url = "https://files.pythonhosted.org/packages/a1/7e/106d4067130c59f1e18a55ffadcd876d8c68534883a1e02685b29d3d8153/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:1910df4756a5ab58cfad8744fc2d0f23926e3efcc346ee76e87b974abab922f4", size = 277600, upload-time = "2026-04-25T11:07:51.745Z" }, + { url = "https://files.pythonhosted.org/packages/c5/86/a081dd30da71d720b2612a792bfd55e45fa9a07ac76a0507f60487473c25/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:d006faf3b491957efcb433489be3c149efe4787b7063d5cddb8ddaefdc60e0c1", size = 416980, upload-time = "2026-04-25T11:07:53.504Z" }, + { url = "https://files.pythonhosted.org/packages/35/29/1a95221a029a3c1293773869e1ab47b07cbbdd82444a42809e8c60156626/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:abb65b4e947e958f7b3b0d71db3ce447d1bc5f37f5eab871ce7223bda8768a04", size = 193840, upload-time = "2026-04-25T11:07:55.103Z" }, + { url = "https://files.pythonhosted.org/packages/c5/e0/db909dd0823285de2286f67e10ee4d81e96ad35d7d8e964ecb07fccd8af9/xxhash-3.7.0-cp313-cp313t-win32.whl", hash = "sha256:178959906cb1716a1ce08e0d69c82886c70a15a6f2790fc084fdd146ca30cd49", size = 30966, upload-time = "2026-04-25T11:07:56.524Z" }, + { url = "https://files.pythonhosted.org/packages/7b/ff/d705b15b22f21ee106adce239cb65d35067a158c630b240270f09b17c2e6/xxhash-3.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2524a1e20d4c231d13b50f7cf39e44265b055669a64a7a4b9a2a44faa03f19b6", size = 31784, upload-time = "2026-04-25T11:07:57.758Z" }, + { url = "https://files.pythonhosted.org/packages/a2/1f/b2cf83c3638fd0588e0b17f22e5a9400bdfb1a3e3755324ac0aee2250b88/xxhash-3.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:37d994d0ffe81ef087bb330d392caa809bb5853c77e22ea3f71db024a0543dba", size = 27932, upload-time = "2026-04-25T11:07:59.109Z" }, + { url = "https://files.pythonhosted.org/packages/0e/cc/431db584f6fbb9312e40a173af027644e5580d39df1f73603cbb9dca4d6b/xxhash-3.7.0-cp314-cp314-android_24_arm64_v8a.whl", hash = "sha256:8c5fcfd806c335bfa2adf1cd0b3110a44fc7b6995c3a648c27489bae85801465", size = 36644, upload-time = "2026-04-25T11:08:00.658Z" }, + { url = "https://files.pythonhosted.org/packages/bc/01/255ec513e0a705d1f9a61413e78dfce4e3235203f0ed525a24c2b4b56345/xxhash-3.7.0-cp314-cp314-android_24_x86_64.whl", hash = "sha256:506a0b488f190f0a06769575e30caf71615c898ed93ab18b0dbcb6dec5c3713c", size = 35003, upload-time = "2026-04-25T11:08:02.338Z" }, + { url = "https://files.pythonhosted.org/packages/68/70/c55fc33c93445b44d8fc5a17b41ed99e3cebe92bcf8396809e63fc9a1165/xxhash-3.7.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:ec68dbba21532c0173a9872298e65c89749f7c9d21538c3a78b5bb6105871568", size = 29655, upload-time = "2026-04-25T11:08:03.701Z" }, + { url = "https://files.pythonhosted.org/packages/c2/72/ff8de73df000d74467d12a59ce6d6e2b2a368b978d41ab7b1fba5ed442be/xxhash-3.7.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:fa77e7ec1450d415d20129961814787c9abd9a07f98872f070b1fe96c5084611", size = 30664, upload-time = "2026-04-25T11:08:05.011Z" }, + { url = "https://files.pythonhosted.org/packages/b6/91/08416d9bd9bc3bf39d831abe8a5631ac2db5141dfd6fe81c3fe59a1f9264/xxhash-3.7.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:fe32736295ea38e43e7d9424053c8c47c9f64fecfc7c895fb3da9b30b131c9ee", size = 33317, upload-time = "2026-04-25T11:08:06.413Z" }, + { url = "https://files.pythonhosted.org/packages/0e/3b/86b1caa4dee10a99f4bf9521e623359341c5e50d05158fa10c275b2bd079/xxhash-3.7.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:ab9dd2c83c4bbd63e422181a76f13502d049d3ddcac9a1bdc29196263d692bb8", size = 33457, upload-time = "2026-04-25T11:08:08.099Z" }, + { url = "https://files.pythonhosted.org/packages/ed/38/98ea14ad1517e1461292a65906951458d520689782bfbae111050145bdba/xxhash-3.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3afec3a336a2286601a437cb07562ab0227685e6fbb9ec17e8c18457ff348ecf", size = 30894, upload-time = "2026-04-25T11:08:09.429Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/a2/074654d0b893606541199993c7db70067d9fc63b748e0d60020a52a1bd36/xxhash-3.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:565df64437a9390f84465dcca33e7377114c7ede8d05cd2cf20081f831ea788e", size = 194409, upload-time = "2026-04-25T11:08:10.91Z" }, + { url = "https://files.pythonhosted.org/packages/e2/26/6d2a1afc468189f77ca28c32e1c83e1b9da1178231e05641dbc1b350e332/xxhash-3.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:12eca820a5d558633d423bf8bb78ce72a55394823f64089247f788a7e0ae691e", size = 213135, upload-time = "2026-04-25T11:08:12.575Z" }, + { url = "https://files.pythonhosted.org/packages/8e/0e/d8aecf95e09c42547453137be74d2f7b8b14e08f5177fa2fab6144a19061/xxhash-3.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f262b8f7599516567e070abf607b9af649052b2c4bd6f9be02b0cb41b7024805", size = 236379, upload-time = "2026-04-25T11:08:14.206Z" }, + { url = "https://files.pythonhosted.org/packages/f2/74/8140e8210536b3dd0cc816c4faaeb5ba6e63e8125ab25af4bcddd6a037b3/xxhash-3.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1598916cb197681e03e601901e4ab96a9a963de398c59d0964f8a6f44a2b361", size = 212447, upload-time = "2026-04-25T11:08:15.79Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d2/462001d2903b4bee5a5689598a0a55e5e7cd1ac7f4247a5545cff10d3ebb/xxhash-3.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:322b2f0622230f526aeb1738149948a7ae357a9e2ceb1383c6fd1fdaecdafa16", size = 445660, upload-time = "2026-04-25T11:08:17.441Z" }, + { url = "https://files.pythonhosted.org/packages/23/09/2bd1ed7f8689b20e51727952cac8329d50c694dc32b2eba06ba5bc742b37/xxhash-3.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24cc22070880cc57b830a65cde4e65fa884c6d9b28ae4803b5ee05911e7bafba", size = 194076, upload-time = "2026-04-25T11:08:19.134Z" }, + { url = "https://files.pythonhosted.org/packages/c9/6e/692302cd0a5f4ac4e6289f37fa888dc2e1e07750b68fe3e4bfe939b8cea3/xxhash-3.7.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb5a888a968b2434abf9ecda357b5d43f10d7b5a6da6fdbbe036208473aff0e2", size = 284990, upload-time = "2026-04-25T11:08:20.618Z" }, + { url = "https://files.pythonhosted.org/packages/05/d9/e54b159b3d9df7999d2a7c676ce7b323d1b5588a64f8f51ed8172567bd87/xxhash-3.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a999771ff97bec27d18341be4f3a36b163bb1ac41ec17bef6d2dabd84acd33c7", size = 210590, upload-time = "2026-04-25T11:08:22.24Z" }, + { url = "https://files.pythonhosted.org/packages/50/93/0e0df1a3a196ced4ca71de76d65ead25d8e87bbfb87b64306ea47a40c00d/xxhash-3.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:ed4a6efe2dee1655adb73e7ad40c6aa955a6892422b1e3b95de6a34de56e3cbb", size = 241442, upload-time = "2026-04-25T11:08:23.844Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a9/d917a7a814e90b218f8a0d37967105eea91bf752c3303683c99a1f7bfc1f/xxhash-3.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9fd17f14ac0faa12126c2f9ca774a8cf342957265ec3c8669c144e5e6cdb478c", size = 198356, upload-time = "2026-04-25T11:08:25.99Z" }, + { url = "https://files.pythonhosted.org/packages/89/5e/f2ba1877c39469abbefc72991d6ebdcbd4c0880db01ae8cb1f553b0c537d/xxhash-3.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = 
"sha256:05fd1254268c59b5cb2a029dfc204275e9fc52de2913f1e53aa8d01442c96b4d", size = 210898, upload-time = "2026-04-25T11:08:27.608Z" }, + { url = "https://files.pythonhosted.org/packages/90/c6/be56b58e73de531f39a10de1355bb77ceb663900dc4bf2d6d3002a9c3f9e/xxhash-3.7.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:a2eae53197c6276d5b317f75a1be226bbf440c20b58bf525f36b5d0e1f657ca6", size = 275519, upload-time = "2026-04-25T11:08:29.301Z" }, + { url = "https://files.pythonhosted.org/packages/92/e2/17ddc85d5765b9c709f192009ed8f5a1fc876f4eb35bba7c307b5b1169f9/xxhash-3.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:bfe6f92e3522dcbe8c4281efd74fa7542a336cb00b0e3272c4ec0edabeaeaf67", size = 414191, upload-time = "2026-04-25T11:08:31.16Z" }, + { url = "https://files.pythonhosted.org/packages/9c/42/85f5b79f4bf1ec7ba052491164adfd4f4e9519f5dc7246de4fbd64a1bd56/xxhash-3.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7ab9a49c410d8c6c786ab99e79c529938d894c01433130353dd0fe999111077a", size = 191604, upload-time = "2026-04-25T11:08:32.862Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d0/6127b623aa4cca18d8b7743592b048d689fd6c6e37ff26a22cddf6cd9d7f/xxhash-3.7.0-cp314-cp314-win32.whl", hash = "sha256:040ea63668f9185b92bc74942df09c7e65703deed71431333678fc6e739a9955", size = 31271, upload-time = "2026-04-25T11:08:34.651Z" }, + { url = "https://files.pythonhosted.org/packages/64/4f/44fc4788568004c43921701cbc127f48218a1eede2c9aea231115323564d/xxhash-3.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2a61e2a3fb23c892496d587b470dee7fa1b58b248a187719c65ea8e94ec13257", size = 32284, upload-time = "2026-04-25T11:08:35.987Z" }, + { url = "https://files.pythonhosted.org/packages/6d/77/18bb895eb60a49453d16e17d67990e5caff557c78eafc90ad4e2eabf4570/xxhash-3.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:c7741c7524961d8c0cb4d4c21b28957ff731a3fd5b5cd8b856dc80a40e9e5acc", size = 28701, upload-time = "2026-04-25T11:08:37.767Z" }, + { url = "https://files.pythonhosted.org/packages/45/a0/46f72244570c550fbbb7db1ef554183dd5ebe9136385f30e032b781ae8f6/xxhash-3.7.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:fc84bf7aa7592f31ec63a3e7b11d624f468a3f19f5238cec7282a42e838ab1d7", size = 33646, upload-time = "2026-04-25T11:08:39.109Z" }, + { url = "https://files.pythonhosted.org/packages/4a/3a/453846a7eceea11e75def361eed01ec6a0205b9822c19927ed364ccae7cc/xxhash-3.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9f1563fdc8abfc389748e6932c7e4e99c89a53e4ec37d4563c24fc06f5e5644b", size = 31125, upload-time = "2026-04-25T11:08:40.467Z" }, + { url = "https://files.pythonhosted.org/packages/bd/3e/49434aba738885d512f9e486db1bdd19db28dfa40372b56da26ef7a4e738/xxhash-3.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2d415f18becf6f153046ab6adc97da77e3643a0ee205dae61c4012604113a020", size = 196633, upload-time = "2026-04-25T11:08:41.943Z" }, + { url = "https://files.pythonhosted.org/packages/a4/e9/006cb6127baeb9f8abe6d15e62faa01349f09b34e2bfd65175b2422d026b/xxhash-3.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bb16aa13ed175bc9be5c2491ba031b85a9b51c4ed90e0b3d4ebe63cf3fb54f8e", size = 215899, upload-time = "2026-04-25T11:08:43.645Z" }, + { url = "https://files.pythonhosted.org/packages/27/e4/cc57d72e66df0ae29b914335f1c6dcf61e8f3746ddf0ae3c471aa4f15e00/xxhash-3.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:f9fd595f1e5941b3d7863e4774e4b30caa6731fc34b9277da032295aa5656ee5", size = 238116, upload-time = "2026-04-25T11:08:45.698Z" }, + { url = "https://files.pythonhosted.org/packages/af/78/3531d4a3fd8a0038cc6be1f265a69c1b3587f557a10b677dd736de2202c1/xxhash-3.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1295325c5a98d552333fa53dc2b026b0ef0ec9c8e73ca3a952990b4c7d65d459", size = 215012, upload-time = "2026-04-25T11:08:47.355Z" }, + { url = "https://files.pythonhosted.org/packages/b4/f6/259fb1eaaec921f59b17203b0daee69829761226d3b980d5191d7723dd83/xxhash-3.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3573a651d146912da9daa9e29e5fbc45994420daaa9ef1e2fa5823e1dc485513", size = 448534, upload-time = "2026-04-25T11:08:49.149Z" }, + { url = "https://files.pythonhosted.org/packages/7b/16/a66d0eaf6a7e68532c07714361ddc904c663ec940f3b028c1ae4a21a7b9d/xxhash-3.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ec1e080a3d02d94ea9335bfab0e3374b877e25411422c18f51a943fa4b46381", size = 196217, upload-time = "2026-04-25T11:08:50.805Z" }, + { url = "https://files.pythonhosted.org/packages/8d/ef/d2efc7fc51756dc52509109d1a25cefc859d74bc4b19a167b12dbd8c2786/xxhash-3.7.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:84415265192072d8638a3afc3c1bc5995e310570cd9acb54dc46d3939e364fe0", size = 286906, upload-time = "2026-04-25T11:08:52.418Z" }, + { url = "https://files.pythonhosted.org/packages/fc/67/25decd1d4a4018582ec4db2a868a2b7e40640f4adb20dfeb19ac923aa825/xxhash-3.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8d4dea659b57443989ef32f4295104fd6912c73d0bf26d1d148bb88a9f159b02", size = 213057, upload-time = "2026-04-25T11:08:54.105Z" }, + { url = "https://files.pythonhosted.org/packages/0d/5d/17651eb29d06786cdc40c60ae3d27d645aa5d61d2eca6237a7ba0b94789b/xxhash-3.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:05ece0fe4d9c9c2728912d1981ae1566cfc83a011571b24732cbf76e1fb70dca", size = 243886, upload-time = "2026-04-25T11:08:56.109Z" }, + { url = "https://files.pythonhosted.org/packages/8a/d4/174d9cf7502243d586e6a9ae842b1ae23026620995114f85f1380e588bc9/xxhash-3.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:fd880353cf1ffaf321bc18dd663e111976dbd0d3bbd8a66d58d2b470dfa7f396", size = 201015, upload-time = "2026-04-25T11:08:57.777Z" }, + { url = "https://files.pythonhosted.org/packages/91/8c/2254e2d06c3ac5e6fe22eaf3da791b87ea823ae9f2c17b4af66755c5752d/xxhash-3.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:4e15cc9e2817f6481160f930c62842b3ff419e20e13072bcbab12230943092bc", size = 213457, upload-time = "2026-04-25T11:08:59.826Z" }, + { url = "https://files.pythonhosted.org/packages/79/a2/e3daa762545921173e3360f3b4ff7fc63c2d27359f7230ec1a7a74e117f6/xxhash-3.7.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:90b9d1a8bd37d768ffc92a1f651ec69afc532a96fa1ac2ea7abbed5d630b3237", size = 277738, upload-time = "2026-04-25T11:09:01.423Z" }, + { url = "https://files.pythonhosted.org/packages/e1/4c/e186da2c46b87f5204640e008d42730bf3c1ee9f0efb71ae1ebcdfeac681/xxhash-3.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:157c49475b34ecea8809e51123d9769a534e139d1247942f7a4bc67710bb2533", size = 417127, upload-time = "2026-04-25T11:09:03.592Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/28/3798e15007a3712d0da3d3fe70f8e11916569858b5cc371053bc26270832/xxhash-3.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5a6ddec83325685e729ca119d1f5c518ec39294212ecd770e60693cdc5f7eb79", size = 193962, upload-time = "2026-04-25T11:09:06.228Z" }, + { url = "https://files.pythonhosted.org/packages/ad/95/a26baa93b5241fd7630998816a4ec47a5a0bad193b3f8fc8f3593e1a4a67/xxhash-3.7.0-cp314-cp314t-win32.whl", hash = "sha256:a04a6cab47e2166435aaf5b9e5ee41d1532cc8300efdef87f2a4d0acb7db19ed", size = 31643, upload-time = "2026-04-25T11:09:08.153Z" }, + { url = "https://files.pythonhosted.org/packages/44/36/5454f13c447e395f9b06a3e91274c59f503d31fad84e1836efe3bdb71f6a/xxhash-3.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8653dd7c2eda020545bb2c71c7f7039b53fe7434d0fc1a0a9deb79ab3f1a4fc1", size = 32522, upload-time = "2026-04-25T11:09:09.534Z" }, + { url = "https://files.pythonhosted.org/packages/74/35/698e7e3ff38e22992ea24870a511d8762474fb6783627a2910ff22a185c2/xxhash-3.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:468f0fc114faaa4b36699f8e328bbc3bb11dc418ba94ac52c26dd736d4b6c637", size = 28807, upload-time = "2026-04-25T11:09:11.234Z" }, ] [[package]] name = "yarl" -version = "1.22.0" +version = "1.23.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "multidict" }, { name = "propcache" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } +sdist = { url = "https://files.pythonhosted.org/packages/23/6e/beb1beec874a72f23815c1434518bfc4ed2175065173fb138c3705f658d4/yarl-1.23.0.tar.gz", hash = "sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5", size = 194676, upload-time = "2026-03-01T22:07:53.373Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, - { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, - { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" }, - { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" }, - { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" }, - { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" }, - { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" }, - { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" }, - { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" }, - { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" }, - { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" }, - { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" }, - { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" }, - { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" }, - { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = 
"sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" }, - { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" }, - { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" }, - { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" }, - { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243, upload-time = "2025-10-06T14:10:19.44Z" }, - { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361, upload-time = "2025-10-06T14:10:21.124Z" }, - { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036, upload-time = "2025-10-06T14:10:22.902Z" }, - { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671, upload-time = "2025-10-06T14:10:24.523Z" }, - { url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059, upload-time = "2025-10-06T14:10:26.406Z" }, - { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356, upload-time = "2025-10-06T14:10:28.461Z" }, - { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331, upload-time = "2025-10-06T14:10:30.541Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" }, - { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" }, - { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" }, - { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" }, - { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" }, - { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" }, - { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, upload-time = "2025-10-06T14:10:44.643Z" }, - { url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" }, - { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" }, - { url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967, upload-time = "2025-10-06T14:10:49.997Z" }, - { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949, upload-time = "2025-10-06T14:10:52.004Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818, upload-time = "2025-10-06T14:10:54.078Z" }, - { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626, upload-time = "2025-10-06T14:10:55.767Z" }, - { url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129, upload-time = "2025-10-06T14:10:57.985Z" }, - { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776, upload-time = "2025-10-06T14:10:59.633Z" }, - { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879, upload-time = "2025-10-06T14:11:01.454Z" }, - { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" }, - { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" }, - { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" }, - { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" }, - { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" }, - { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520, upload-time = "2025-10-06T14:11:15.465Z" }, - { url = "https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504, upload-time = "2025-10-06T14:11:17.106Z" }, - { url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282, upload-time = "2025-10-06T14:11:19.064Z" }, - { url = "https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e", size = 372080, upload-time = "2025-10-06T14:11:20.996Z" }, - { url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590", size = 338696, upload-time = "2025-10-06T14:11:22.847Z" }, - { url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2", size = 387121, upload-time = "2025-10-06T14:11:24.889Z" }, - { url = "https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da", size = 394080, upload-time = "2025-10-06T14:11:27.307Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784", size = 372661, upload-time = "2025-10-06T14:11:29.387Z" }, - { url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b", size = 364645, upload-time = "2025-10-06T14:11:31.423Z" }, - { url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694", size = 355361, upload-time = "2025-10-06T14:11:33.055Z" }, - { url = "https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = 
"sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451, upload-time = "2025-10-06T14:11:35.136Z" }, - { url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814, upload-time = "2025-10-06T14:11:37.094Z" }, - { url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799, upload-time = "2025-10-06T14:11:38.83Z" }, - { url = "https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990, upload-time = "2025-10-06T14:11:40.624Z" }, - { url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292, upload-time = "2025-10-06T14:11:42.578Z" }, - { url = "https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888, upload-time = "2025-10-06T14:11:44.863Z" }, - { url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223, upload-time = "2025-10-06T14:11:46.796Z" }, - { url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981, upload-time = "2025-10-06T14:11:48.845Z" }, - { url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303, upload-time = "2025-10-06T14:11:50.897Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c", size = 361820, upload-time = "2025-10-06T14:11:52.549Z" }, - { url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf", size = 323203, upload-time = "2025-10-06T14:11:54.225Z" }, - { url = "https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face", size = 363173, upload-time = "2025-10-06T14:11:56.069Z" }, - { url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b", size = 373562, upload-time = "2025-10-06T14:11:58.783Z" }, - { url = "https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486", size = 339828, upload-time = "2025-10-06T14:12:00.686Z" }, - { url = "https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138", size = 347551, upload-time = "2025-10-06T14:12:02.628Z" }, - { url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a", size = 334512, upload-time = "2025-10-06T14:12:04.871Z" }, - { url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400, upload-time = "2025-10-06T14:12:06.624Z" }, - { url = "https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 357140, upload-time = "2025-10-06T14:12:08.362Z" }, - { url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473, upload-time = "2025-10-06T14:12:10.994Z" }, - { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056, upload-time = "2025-10-06T14:12:13.317Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292, upload-time = "2025-10-06T14:12:15.398Z" }, - { url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" }, - { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = 
"2025-10-06T14:12:53.872Z" }, + { url = "https://files.pythonhosted.org/packages/88/8a/94615bc31022f711add374097ad4144d569e95ff3c38d39215d07ac153a0/yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860", size = 124737, upload-time = "2026-03-01T22:05:12.897Z" }, + { url = "https://files.pythonhosted.org/packages/e3/6f/c6554045d59d64052698add01226bc867b52fe4a12373415d7991fdca95d/yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069", size = 87029, upload-time = "2026-03-01T22:05:14.376Z" }, + { url = "https://files.pythonhosted.org/packages/19/2a/725ecc166d53438bc88f76822ed4b1e3b10756e790bafd7b523fe97c322d/yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25", size = 86310, upload-time = "2026-03-01T22:05:15.71Z" }, + { url = "https://files.pythonhosted.org/packages/99/30/58260ed98e6ff7f90ba84442c1ddd758c9170d70327394a6227b310cd60f/yarl-1.23.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cbf44c5cb4a7633d078788e1b56387e3d3cf2b8139a3be38040b22d6c3221c8", size = 97587, upload-time = "2026-03-01T22:05:17.384Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/8b08aac08b50682e65759f7f8dde98ae8168f72487e7357a5d684c581ef9/yarl-1.23.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53ad387048f6f09a8969631e4de3f1bf70c50e93545d64af4f751b2498755072", size = 92528, upload-time = "2026-03-01T22:05:18.804Z" }, + { url = "https://files.pythonhosted.org/packages/52/07/0b7179101fe5f8385ec6c6bb5d0cb9f76bd9fb4a769591ab6fb5cdbfc69a/yarl-1.23.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a59ba56f340334766f3a4442e0efd0af895fae9e2b204741ef885c446b3a1a8", size = 105339, upload-time = "2026-03-01T22:05:20.235Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8a/36d82869ab5ec829ca8574dfcb92b51286fcfb1e9c7a73659616362dc880/yarl-1.23.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:803a3c3ce4acc62eaf01eaca1208dcf0783025ef27572c3336502b9c232005e7", size = 105061, upload-time = "2026-03-01T22:05:22.268Z" }, + { url = "https://files.pythonhosted.org/packages/66/3e/868e5c3364b6cee19ff3e1a122194fa4ce51def02c61023970442162859e/yarl-1.23.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3d2bff8f37f8d0f96c7ec554d16945050d54462d6e95414babaa18bfafc7f51", size = 100132, upload-time = "2026-03-01T22:05:23.638Z" }, + { url = "https://files.pythonhosted.org/packages/cf/26/9c89acf82f08a52cb52d6d39454f8d18af15f9d386a23795389d1d423823/yarl-1.23.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c75eb09e8d55bceb4367e83496ff8ef2bc7ea6960efb38e978e8073ea59ecb67", size = 99289, upload-time = "2026-03-01T22:05:25.749Z" }, + { url = "https://files.pythonhosted.org/packages/6f/54/5b0db00d2cb056922356104468019c0a132e89c8d3ab67d8ede9f4483d2a/yarl-1.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877b0738624280e34c55680d6054a307aa94f7d52fa0e3034a9cc6e790871da7", size = 96950, upload-time = "2026-03-01T22:05:27.318Z" }, + { url = "https://files.pythonhosted.org/packages/f6/40/10fa93811fd439341fad7e0718a86aca0de9548023bbb403668d6555acab/yarl-1.23.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = 
"sha256:b5405bb8f0e783a988172993cfc627e4d9d00432d6bbac65a923041edacf997d", size = 93960, upload-time = "2026-03-01T22:05:28.738Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d2/8ae2e6cd77d0805f4526e30ec43b6f9a3dfc542d401ac4990d178e4bf0cf/yarl-1.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1c3a3598a832590c5a3ce56ab5576361b5688c12cb1d39429cf5dba30b510760", size = 104703, upload-time = "2026-03-01T22:05:30.438Z" }, + { url = "https://files.pythonhosted.org/packages/2f/0c/b3ceacf82c3fe21183ce35fa2acf5320af003d52bc1fcf5915077681142e/yarl-1.23.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8419ebd326430d1cbb7efb5292330a2cf39114e82df5cc3d83c9a0d5ebeaf2f2", size = 98325, upload-time = "2026-03-01T22:05:31.835Z" }, + { url = "https://files.pythonhosted.org/packages/9d/e0/12900edd28bdab91a69bd2554b85ad7b151f64e8b521fe16f9ad2f56477a/yarl-1.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:be61f6fff406ca40e3b1d84716fde398fc08bc63dd96d15f3a14230a0973ed86", size = 105067, upload-time = "2026-03-01T22:05:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/15/61/74bb1182cf79c9bbe4eb6b1f14a57a22d7a0be5e9cedf8e2d5c2086474c3/yarl-1.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ceb13c5c858d01321b5d9bb65e4cf37a92169ea470b70fec6f236b2c9dd7e34", size = 100285, upload-time = "2026-03-01T22:05:35.4Z" }, + { url = "https://files.pythonhosted.org/packages/69/7f/cd5ef733f2550de6241bd8bd8c3febc78158b9d75f197d9c7baa113436af/yarl-1.23.0-cp312-cp312-win32.whl", hash = "sha256:fffc45637bcd6538de8b85f51e3df3223e4ad89bccbfca0481c08c7fc8b7ed7d", size = 82359, upload-time = "2026-03-01T22:05:36.811Z" }, + { url = "https://files.pythonhosted.org/packages/f5/be/25216a49daeeb7af2bec0db22d5e7df08ed1d7c9f65d78b14f3b74fd72fc/yarl-1.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:f69f57305656a4852f2a7203efc661d8c042e6cc67f7acd97d8667fb448a426e", size = 87674, upload-time = "2026-03-01T22:05:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/d2/35/aeab955d6c425b227d5b7247eafb24f2653fedc32f95373a001af5dfeb9e/yarl-1.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:6e87a6e8735b44816e7db0b2fbc9686932df473c826b0d9743148432e10bb9b9", size = 81879, upload-time = "2026-03-01T22:05:40.006Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4b/a0a6e5d0ee8a2f3a373ddef8a4097d74ac901ac363eea1440464ccbe0898/yarl-1.23.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:16c6994ac35c3e74fb0ae93323bf8b9c2a9088d55946109489667c510a7d010e", size = 123796, upload-time = "2026-03-01T22:05:41.412Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/8925d68af039b835ae876db5838e82e76ec87b9782ecc97e192b809c4831/yarl-1.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a42e651629dafb64fd5b0286a3580613702b5809ad3f24934ea87595804f2c5", size = 86547, upload-time = "2026-03-01T22:05:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/50/06d511cc4b8e0360d3c94af051a768e84b755c5eb031b12adaaab6dec6e5/yarl-1.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c6b9461a2a8b47c65eef63bb1c76a4f1c119618ffa99ea79bc5bb1e46c5821b", size = 85854, upload-time = "2026-03-01T22:05:44.85Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f4/4e30b250927ffdab4db70da08b9b8d2194d7c7b400167b8fbeca1e4701ca/yarl-1.23.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2569b67d616eab450d262ca7cb9f9e19d2f718c70a8b88712859359d0ab17035", size = 98351, upload-time = "2026-03-01T22:05:46.836Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/fc/4118c5671ea948208bdb1492d8b76bdf1453d3e73df051f939f563e7dcc5/yarl-1.23.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e9d9a4d06d3481eab79803beb4d9bd6f6a8e781ec078ac70d7ef2dcc29d1bea5", size = 92711, upload-time = "2026-03-01T22:05:48.316Z" }, + { url = "https://files.pythonhosted.org/packages/56/11/1ed91d42bd9e73c13dc9e7eb0dd92298d75e7ac4dd7f046ad0c472e231cd/yarl-1.23.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f514f6474e04179d3d33175ed3f3e31434d3130d42ec153540d5b157deefd735", size = 106014, upload-time = "2026-03-01T22:05:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/ce/c9/74e44e056a23fbc33aca71779ef450ca648a5bc472bdad7a82339918f818/yarl-1.23.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fda207c815b253e34f7e1909840fd14299567b1c0eb4908f8c2ce01a41265401", size = 105557, upload-time = "2026-03-01T22:05:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/fe/b1e10b08d287f518994f1e2ff9b6d26f0adeecd8dd7d533b01bab29a3eda/yarl-1.23.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34b6cf500e61c90f305094911f9acc9c86da1a05a7a3f5be9f68817043f486e4", size = 101559, upload-time = "2026-03-01T22:05:52.872Z" }, + { url = "https://files.pythonhosted.org/packages/72/59/c5b8d94b14e3d3c2a9c20cb100119fd534ab5a14b93673ab4cc4a4141ea5/yarl-1.23.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d7504f2b476d21653e4d143f44a175f7f751cd41233525312696c76aa3dbb23f", size = 100502, upload-time = "2026-03-01T22:05:54.954Z" }, + { url = "https://files.pythonhosted.org/packages/77/4f/96976cb54cbfc5c9fd73ed4c51804f92f209481d1fb190981c0f8a07a1d7/yarl-1.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:578110dd426f0d209d1509244e6d4a3f1a3e9077655d98c5f22583d63252a08a", size = 98027, upload-time = "2026-03-01T22:05:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/63/6e/904c4f476471afdbad6b7e5b70362fb5810e35cd7466529a97322b6f5556/yarl-1.23.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:609d3614d78d74ebe35f54953c5bbd2ac647a7ddb9c30a5d877580f5e86b22f2", size = 95369, upload-time = "2026-03-01T22:05:58.141Z" }, + { url = "https://files.pythonhosted.org/packages/9d/40/acfcdb3b5f9d68ef499e39e04d25e141fe90661f9d54114556cf83be8353/yarl-1.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4966242ec68afc74c122f8459abd597afd7d8a60dc93d695c1334c5fd25f762f", size = 105565, upload-time = "2026-03-01T22:06:00.286Z" }, + { url = "https://files.pythonhosted.org/packages/5e/c6/31e28f3a6ba2869c43d124f37ea5260cac9c9281df803c354b31f4dd1f3c/yarl-1.23.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0fd068364a6759bc794459f0a735ab151d11304346332489c7972bacbe9e72b", size = 99813, upload-time = "2026-03-01T22:06:01.712Z" }, + { url = "https://files.pythonhosted.org/packages/08/1f/6f65f59e72d54aa467119b63fc0b0b1762eff0232db1f4720cd89e2f4a17/yarl-1.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:39004f0ad156da43e86aa71f44e033de68a44e5a31fc53507b36dd253970054a", size = 105632, upload-time = "2026-03-01T22:06:03.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c4/18b178a69935f9e7a338127d5b77d868fdc0f0e49becd286d51b3a18c61d/yarl-1.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5723c01a56c5028c807c701aa66722916d2747ad737a046853f6c46f4875543", size = 101895, 
upload-time = "2026-03-01T22:06:04.651Z" }, + { url = "https://files.pythonhosted.org/packages/8f/54/f5b870b5505663911dba950a8e4776a0dbd51c9c54c0ae88e823e4b874a0/yarl-1.23.0-cp313-cp313-win32.whl", hash = "sha256:1b6b572edd95b4fa8df75de10b04bc81acc87c1c7d16bcdd2035b09d30acc957", size = 82356, upload-time = "2026-03-01T22:06:06.04Z" }, + { url = "https://files.pythonhosted.org/packages/7a/84/266e8da36879c6edcd37b02b547e2d9ecdfea776be49598e75696e3316e1/yarl-1.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:baaf55442359053c7d62f6f8413a62adba3205119bcb6f49594894d8be47e5e3", size = 87515, upload-time = "2026-03-01T22:06:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/00/fd/7e1c66efad35e1649114fa13f17485f62881ad58edeeb7f49f8c5e748bf9/yarl-1.23.0-cp313-cp313-win_arm64.whl", hash = "sha256:fb4948814a2a98e3912505f09c9e7493b1506226afb1f881825368d6fb776ee3", size = 81785, upload-time = "2026-03-01T22:06:10.181Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fc/119dd07004f17ea43bb91e3ece6587759edd7519d6b086d16bfbd3319982/yarl-1.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:aecfed0b41aa72b7881712c65cf764e39ce2ec352324f5e0837c7048d9e6daaa", size = 130719, upload-time = "2026-03-01T22:06:11.708Z" }, + { url = "https://files.pythonhosted.org/packages/e6/0d/9f2348502fbb3af409e8f47730282cd6bc80dec6630c1e06374d882d6eb2/yarl-1.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a41bcf68efd19073376eb8cf948b8d9be0af26256403e512bb18f3966f1f9120", size = 89690, upload-time = "2026-03-01T22:06:13.429Z" }, + { url = "https://files.pythonhosted.org/packages/50/93/e88f3c80971b42cfc83f50a51b9d165a1dbf154b97005f2994a79f212a07/yarl-1.23.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cde9a2ecd91668bcb7f077c4966d8ceddb60af01b52e6e3e2680e4cf00ad1a59", size = 89851, upload-time = "2026-03-01T22:06:15.53Z" }, + { url = "https://files.pythonhosted.org/packages/1c/07/61c9dd8ba8f86473263b4036f70fb594c09e99c0d9737a799dfd8bc85651/yarl-1.23.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5023346c4ee7992febc0068e7593de5fa2bf611848c08404b35ebbb76b1b0512", size = 95874, upload-time = "2026-03-01T22:06:17.553Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e9/f9ff8ceefba599eac6abddcfb0b3bee9b9e636e96dbf54342a8577252379/yarl-1.23.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1009abedb49ae95b136a8904a3f71b342f849ffeced2d3747bf29caeda218c4", size = 88710, upload-time = "2026-03-01T22:06:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/eb/78/0231bfcc5d4c8eec220bc2f9ef82cb4566192ea867a7c5b4148f44f6cbcd/yarl-1.23.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a8d00f29b42f534cc8aa3931cfe773b13b23e561e10d2b26f27a8d309b0e82a1", size = 101033, upload-time = "2026-03-01T22:06:21.203Z" }, + { url = "https://files.pythonhosted.org/packages/cd/9b/30ea5239a61786f18fd25797151a17fbb3be176977187a48d541b5447dd4/yarl-1.23.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:95451e6ce06c3e104556d73b559f5da6c34a069b6b62946d3ad66afcd51642ea", size = 100817, upload-time = "2026-03-01T22:06:22.738Z" }, + { url = "https://files.pythonhosted.org/packages/62/e2/a4980481071791bc83bce2b7a1a1f7adcabfa366007518b4b845e92eeee3/yarl-1.23.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:531ef597132086b6cf96faa7c6c1dcd0361dd5f1694e5cc30375907b9b7d3ea9", size = 97482, upload-time = "2026-03-01T22:06:24.21Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1e/304a00cf5f6100414c4b5a01fc7ff9ee724b62158a08df2f8170dfc72a2d/yarl-1.23.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:88f9fb0116fbfcefcab70f85cf4b74a2b6ce5d199c41345296f49d974ddb4123", size = 95949, upload-time = "2026-03-01T22:06:25.697Z" }, + { url = "https://files.pythonhosted.org/packages/68/03/093f4055ed4cae649ac53bca3d180bd37102e9e11d048588e9ab0c0108d0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e7b0460976dc75cb87ad9cc1f9899a4b97751e7d4e77ab840fc9b6d377b8fd24", size = 95839, upload-time = "2026-03-01T22:06:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/b9/28/4c75ebb108f322aa8f917ae10a8ffa4f07cae10a8a627b64e578617df6a0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:115136c4a426f9da976187d238e84139ff6b51a20839aa6e3720cd1026d768de", size = 90696, upload-time = "2026-03-01T22:06:29.048Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/42c2e2dd91c1a570402f51bdf066bfdb1241c2240ba001967bad778e77b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ead11956716a940c1abc816b7df3fa2b84d06eaed8832ca32f5c5e058c65506b", size = 100865, upload-time = "2026-03-01T22:06:30.525Z" }, + { url = "https://files.pythonhosted.org/packages/74/05/1bcd60a8a0a914d462c305137246b6f9d167628d73568505fce3f1cb2e65/yarl-1.23.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:fe8f8f5e70e6dbdfca9882cd9deaac058729bcf323cf7a58660901e55c9c94f6", size = 96234, upload-time = "2026-03-01T22:06:32.692Z" }, + { url = "https://files.pythonhosted.org/packages/90/b2/f52381aac396d6778ce516b7bc149c79e65bfc068b5de2857ab69eeea3b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a0e317df055958a0c1e79e5d2aa5a5eaa4a6d05a20d4b0c9c3f48918139c9fc6", size = 100295, upload-time = "2026-03-01T22:06:34.268Z" }, + { url = "https://files.pythonhosted.org/packages/e5/e8/638bae5bbf1113a659b2435d8895474598afe38b4a837103764f603aba56/yarl-1.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f0fd84de0c957b2d280143522c4f91a73aada1923caee763e24a2b3fda9f8a5", size = 97784, upload-time = "2026-03-01T22:06:35.864Z" }, + { url = "https://files.pythonhosted.org/packages/80/25/a3892b46182c586c202629fc2159aa13975d3741d52ebd7347fd501d48d5/yarl-1.23.0-cp313-cp313t-win32.whl", hash = "sha256:93a784271881035ab4406a172edb0faecb6e7d00f4b53dc2f55919d6c9688595", size = 88313, upload-time = "2026-03-01T22:06:37.39Z" }, + { url = "https://files.pythonhosted.org/packages/43/68/8c5b36aa5178900b37387937bc2c2fe0e9505537f713495472dcf6f6fccc/yarl-1.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dd00607bffbf30250fe108065f07453ec124dbf223420f57f5e749b04295e090", size = 94932, upload-time = "2026-03-01T22:06:39.579Z" }, + { url = "https://files.pythonhosted.org/packages/c6/cc/d79ba8292f51f81f4dc533a8ccfb9fc6992cabf0998ed3245de7589dc07c/yarl-1.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ac09d42f48f80c9ee1635b2fcaa819496a44502737660d3c0f2ade7526d29144", size = 84786, upload-time = "2026-03-01T22:06:41.988Z" }, + { url = "https://files.pythonhosted.org/packages/90/98/b85a038d65d1b92c3903ab89444f48d3cee490a883477b716d7a24b1a78c/yarl-1.23.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:21d1b7305a71a15b4794b5ff22e8eef96ff4a6d7f9657155e5aa419444b28912", size = 124455, upload-time = "2026-03-01T22:06:43.615Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/54/bc2b45559f86543d163b6e294417a107bb87557609007c007ad889afec18/yarl-1.23.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:85610b4f27f69984932a7abbe52703688de3724d9f72bceb1cca667deff27474", size = 86752, upload-time = "2026-03-01T22:06:45.425Z" }, + { url = "https://files.pythonhosted.org/packages/24/f9/e8242b68362bffe6fb536c8db5076861466fc780f0f1b479fc4ffbebb128/yarl-1.23.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23f371bd662cf44a7630d4d113101eafc0cfa7518a2760d20760b26021454719", size = 86291, upload-time = "2026-03-01T22:06:46.974Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d8/d1cb2378c81dd729e98c716582b1ccb08357e8488e4c24714658cc6630e8/yarl-1.23.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a80f77dc1acaaa61f0934176fccca7096d9b1ff08c8ba9cddf5ae034a24319", size = 99026, upload-time = "2026-03-01T22:06:48.459Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ff/7196790538f31debe3341283b5b0707e7feb947620fc5e8236ef28d44f72/yarl-1.23.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:bd654fad46d8d9e823afbb4f87c79160b5a374ed1ff5bde24e542e6ba8f41434", size = 92355, upload-time = "2026-03-01T22:06:50.306Z" }, + { url = "https://files.pythonhosted.org/packages/c1/56/25d58c3eddde825890a5fe6aa1866228377354a3c39262235234ab5f616b/yarl-1.23.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:682bae25f0a0dd23a056739f23a134db9f52a63e2afd6bfb37ddc76292bbd723", size = 106417, upload-time = "2026-03-01T22:06:52.1Z" }, + { url = "https://files.pythonhosted.org/packages/51/8a/882c0e7bc8277eb895b31bce0138f51a1ba551fc2e1ec6753ffc1e7c1377/yarl-1.23.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a82836cab5f197a0514235aaf7ffccdc886ccdaa2324bc0aafdd4ae898103039", size = 106422, upload-time = "2026-03-01T22:06:54.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/2b/fef67d616931055bf3d6764885990a3ac647d68734a2d6a9e1d13de437a2/yarl-1.23.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c57676bdedc94cd3bc37724cf6f8cd2779f02f6aba48de45feca073e714fe52", size = 101915, upload-time = "2026-03-01T22:06:55.895Z" }, + { url = "https://files.pythonhosted.org/packages/18/6a/530e16aebce27c5937920f3431c628a29a4b6b430fab3fd1c117b26ff3f6/yarl-1.23.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c7f8dc16c498ff06497c015642333219871effba93e4a2e8604a06264aca5c5c", size = 100690, upload-time = "2026-03-01T22:06:58.21Z" }, + { url = "https://files.pythonhosted.org/packages/88/08/93749219179a45e27b036e03260fda05190b911de8e18225c294ac95bbc9/yarl-1.23.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5ee586fb17ff8f90c91cf73c6108a434b02d69925f44f5f8e0d7f2f260607eae", size = 98750, upload-time = "2026-03-01T22:06:59.794Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cf/ea424a004969f5d81a362110a6ac1496d79efdc6d50c2c4b2e3ea0fc2519/yarl-1.23.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:17235362f580149742739cc3828b80e24029d08cbb9c4bda0242c7b5bc610a8e", size = 94685, upload-time = "2026-03-01T22:07:01.375Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b7/14341481fe568e2b0408bcf1484c652accafe06a0ade9387b5d3fd9df446/yarl-1.23.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = 
"sha256:0793e2bd0cf14234983bbb371591e6bea9e876ddf6896cdcc93450996b0b5c85", size = 106009, upload-time = "2026-03-01T22:07:03.151Z" }, + { url = "https://files.pythonhosted.org/packages/0a/e6/5c744a9b54f4e8007ad35bce96fbc9218338e84812d36f3390cea616881a/yarl-1.23.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3650dc2480f94f7116c364096bc84b1d602f44224ef7d5c7208425915c0475dd", size = 100033, upload-time = "2026-03-01T22:07:04.701Z" }, + { url = "https://files.pythonhosted.org/packages/0c/23/e3bfc188d0b400f025bc49d99793d02c9abe15752138dcc27e4eaf0c4a9e/yarl-1.23.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f40e782d49630ad384db66d4d8b73ff4f1b8955dc12e26b09a3e3af064b3b9d6", size = 106483, upload-time = "2026-03-01T22:07:06.231Z" }, + { url = "https://files.pythonhosted.org/packages/72/42/f0505f949a90b3f8b7a363d6cbdf398f6e6c58946d85c6d3a3bc70595b26/yarl-1.23.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94f8575fbdf81749008d980c17796097e645574a3b8c28ee313931068dad14fe", size = 102175, upload-time = "2026-03-01T22:07:08.4Z" }, + { url = "https://files.pythonhosted.org/packages/aa/65/b39290f1d892a9dd671d1c722014ca062a9c35d60885d57e5375db0404b5/yarl-1.23.0-cp314-cp314-win32.whl", hash = "sha256:c8aa34a5c864db1087d911a0b902d60d203ea3607d91f615acd3f3108ac32169", size = 83871, upload-time = "2026-03-01T22:07:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/a9/5b/9b92f54c784c26e2a422e55a8d2607ab15b7ea3349e28359282f84f01d43/yarl-1.23.0-cp314-cp314-win_amd64.whl", hash = "sha256:63e92247f383c85ab00dd0091e8c3fa331a96e865459f5ee80353c70a4a42d70", size = 89093, upload-time = "2026-03-01T22:07:11.501Z" }, + { url = "https://files.pythonhosted.org/packages/e0/7d/8a84dc9381fd4412d5e7ff04926f9865f6372b4c2fd91e10092e65d29eb8/yarl-1.23.0-cp314-cp314-win_arm64.whl", hash = "sha256:70efd20be968c76ece7baa8dafe04c5be06abc57f754d6f36f3741f7aa7a208e", size = 83384, upload-time = "2026-03-01T22:07:13.069Z" }, + { url = "https://files.pythonhosted.org/packages/dd/8d/d2fad34b1c08aa161b74394183daa7d800141aaaee207317e82c790b418d/yarl-1.23.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:9a18d6f9359e45722c064c97464ec883eb0e0366d33eda61cb19a244bf222679", size = 131019, upload-time = "2026-03-01T22:07:14.903Z" }, + { url = "https://files.pythonhosted.org/packages/19/ff/33009a39d3ccf4b94d7d7880dfe17fb5816c5a4fe0096d9b56abceea9ac7/yarl-1.23.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2803ed8b21ca47a43da80a6fd1ed3019d30061f7061daa35ac54f63933409412", size = 89894, upload-time = "2026-03-01T22:07:17.372Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f1/dab7ac5e7306fb79c0190766a3c00b4cb8d09a1f390ded68c85a5934faf5/yarl-1.23.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:394906945aa8b19fc14a61cf69743a868bb8c465efe85eee687109cc540b98f4", size = 89979, upload-time = "2026-03-01T22:07:19.361Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b1/08e95f3caee1fad6e65017b9f26c1d79877b502622d60e517de01e72f95d/yarl-1.23.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71d006bee8397a4a89f469b8deb22469fe7508132d3c17fa6ed871e79832691c", size = 95943, upload-time = "2026-03-01T22:07:21.266Z" }, + { url = "https://files.pythonhosted.org/packages/c0/cc/6409f9018864a6aa186c61175b977131f373f1988e198e031236916e87e4/yarl-1.23.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:62694e275c93d54f7ccedcfef57d42761b2aad5234b6be1f3e3026cae4001cd4", size = 88786, 
upload-time = "2026-03-01T22:07:23.129Z" }, + { url = "https://files.pythonhosted.org/packages/76/40/cc22d1d7714b717fde2006fad2ced5efe5580606cb059ae42117542122f3/yarl-1.23.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31de1613658308efdb21ada98cbc86a97c181aa050ba22a808120bb5be3ab94", size = 101307, upload-time = "2026-03-01T22:07:24.689Z" }, + { url = "https://files.pythonhosted.org/packages/8f/0d/476c38e85ddb4c6ec6b20b815bdd779aa386a013f3d8b85516feee55c8dc/yarl-1.23.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb1e8b8d66c278b21d13b0a7ca22c41dd757a7c209c6b12c313e445c31dd3b28", size = 100904, upload-time = "2026-03-01T22:07:26.287Z" }, + { url = "https://files.pythonhosted.org/packages/72/32/0abe4a76d59adf2081dcb0397168553ece4616ada1c54d1c49d8936c74f8/yarl-1.23.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50f9d8d531dfb767c565f348f33dd5139a6c43f5cbdf3f67da40d54241df93f6", size = 97728, upload-time = "2026-03-01T22:07:27.906Z" }, + { url = "https://files.pythonhosted.org/packages/b7/35/7b30f4810fba112f60f5a43237545867504e15b1c7647a785fbaf588fac2/yarl-1.23.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:575aa4405a656e61a540f4a80eaa5260f2a38fff7bfdc4b5f611840d76e9e277", size = 95964, upload-time = "2026-03-01T22:07:30.198Z" }, + { url = "https://files.pythonhosted.org/packages/2d/86/ed7a73ab85ef00e8bb70b0cb5421d8a2a625b81a333941a469a6f4022828/yarl-1.23.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:041b1a4cefacf65840b4e295c6985f334ba83c30607441ae3cf206a0eed1a2e4", size = 95882, upload-time = "2026-03-01T22:07:32.132Z" }, + { url = "https://files.pythonhosted.org/packages/19/90/d56967f61a29d8498efb7afb651e0b2b422a1e9b47b0ab5f4e40a19b699b/yarl-1.23.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:d38c1e8231722c4ce40d7593f28d92b5fc72f3e9774fe73d7e800ec32299f63a", size = 90797, upload-time = "2026-03-01T22:07:34.404Z" }, + { url = "https://files.pythonhosted.org/packages/72/00/8b8f76909259f56647adb1011d7ed8b321bcf97e464515c65016a47ecdf0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d53834e23c015ee83a99377db6e5e37d8484f333edb03bd15b4bc312cc7254fb", size = 101023, upload-time = "2026-03-01T22:07:35.953Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e2/cab11b126fb7d440281b7df8e9ddbe4851e70a4dde47a202b6642586b8d9/yarl-1.23.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2e27c8841126e017dd2a054a95771569e6070b9ee1b133366d8b31beb5018a41", size = 96227, upload-time = "2026-03-01T22:07:37.594Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9b/2c893e16bfc50e6b2edf76c1a9eb6cb0c744346197e74c65e99ad8d634d0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:76855800ac56f878847a09ce6dba727c93ca2d89c9e9d63002d26b916810b0a2", size = 100302, upload-time = "2026-03-01T22:07:39.334Z" }, + { url = "https://files.pythonhosted.org/packages/28/ec/5498c4e3a6d5f1003beb23405671c2eb9cdbf3067d1c80f15eeafe301010/yarl-1.23.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e09fd068c2e169a7070d83d3bde728a4d48de0549f975290be3c108c02e499b4", size = 98202, upload-time = "2026-03-01T22:07:41.717Z" }, + { url = "https://files.pythonhosted.org/packages/fe/c3/cd737e2d45e70717907f83e146f6949f20cc23cd4bf7b2688727763aa458/yarl-1.23.0-cp314-cp314t-win32.whl", hash = "sha256:73309162a6a571d4cbd3b6a1dcc703c7311843ae0d1578df6f09be4e98df38d4", size = 
90558, upload-time = "2026-03-01T22:07:43.433Z" }, + { url = "https://files.pythonhosted.org/packages/e1/19/3774d162f6732d1cfb0b47b4140a942a35ca82bb19b6db1f80e9e7bdc8f8/yarl-1.23.0-cp314-cp314t-win_amd64.whl", hash = "sha256:4503053d296bc6e4cbd1fad61cf3b6e33b939886c4f249ba7c78b602214fabe2", size = 97610, upload-time = "2026-03-01T22:07:45.773Z" }, + { url = "https://files.pythonhosted.org/packages/51/47/3fa2286c3cb162c71cdb34c4224d5745a1ceceb391b2bd9b19b668a8d724/yarl-1.23.0-cp314-cp314t-win_arm64.whl", hash = "sha256:44bb7bef4ea409384e3f8bc36c063d77ea1b8d4a5b2706956c0d6695f07dcc25", size = 86041, upload-time = "2026-03-01T22:07:49.026Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/c8739671f5699c7dc470580a4f821ef37c32c4cb0b047ce223a7f115757f/yarl-1.23.0-py3-none-any.whl", hash = "sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f", size = 48288, upload-time = "2026-03-01T22:07:51.388Z" }, ] [[package]] @@ -4298,11 +4781,11 @@ wheels = [ [[package]] name = "zipp" -version = "3.23.0" +version = "3.23.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +sdist = { url = "https://files.pythonhosted.org/packages/30/21/093488dfc7cc8964ded15ab726fad40f25fd3d788fd741cc1c5a17d78ee8/zipp-3.23.1.tar.gz", hash = "sha256:32120e378d32cd9714ad503c1d024619063ec28aad2248dc6672ad13edfa5110", size = 25965, upload-time = "2026-04-13T23:21:46.6Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/08/8a/0861bec20485572fbddf3dfba2910e38fe249796cb73ecdeb74e07eeb8d3/zipp-3.23.1-py3-none-any.whl", hash = "sha256:0b3596c50a5c700c9cb40ba8d86d9f2cc4807e9bedb06bcdf7fac85633e444dc", size = 10378, upload-time = "2026-04-13T23:21:45.386Z" }, ] [[package]] diff --git a/config.example.yaml b/config.example.yaml index fc3d9c8c1..a14ca5886 100644 --- a/config.example.yaml +++ b/config.example.yaml @@ -12,7 +12,7 @@ # ============================================================================ # Bump this number when the config schema changes. # Run `make config-upgrade` to merge new fields into your local config.yaml. -config_version: 7 +config_version: 8 # ============================================================================ # Logging @@ -326,6 +326,27 @@ models: # chat_template_kwargs: # enable_thinking: true + + # Example: Qwen3-Coder deployed on MindIE Engine + # - name: Qwen3_Coder_480B_MindIE + # display_name: Qwen3-Coder-480B (MindIE) + # use: deerflow.models.mindie_provider:MindIEChatModel + # model: Qwen3-Coder-480B-A35B-Instruct-Client + # base_url: http://localhost:8989/v1 + # api_key: $OPENAI_API_KEY + # temperature: 0 + # max_retries: 1 + # supports_thinking: false + # supports_vision: false + # supports_reasoning_effort: false + # # --- Advanced Network Settings --- + # # Due to MindIE's streaming limitations with tool calling, the provider + # # uses mock-streaming (awaiting full generation). Extended timeouts are required. 
+ # read_timeout: 900.0 # 15 minutes to prevent drops during long document generation + # connect_timeout: 30.0 + # write_timeout: 60.0 + # pool_timeout: 30.0 + # ============================================================================ # Tool Groups Configuration # ============================================================================ @@ -577,15 +598,41 @@ sandbox: # # Optional global max-turn override for all subagents # # max_turns: 120 # -# # Optional per-agent overrides +# # Optional per-agent overrides (applies to both built-in and custom agents) # agents: # general-purpose: # timeout_seconds: 1800 # 30 minutes for complex multi-step tasks # max_turns: 160 # # model: qwen3:32b # Use a specific model (default: inherit from lead agent) +# # skills: # Skill whitelist (default: inherit all enabled skills) +# # - web-search +# # - data-analysis # bash: # timeout_seconds: 300 # 5 minutes for quick command execution # max_turns: 80 +# # skills: [] # No skills for bash agent +# +# # Custom subagent types: define specialized agents with their own prompts, +# # tools, skills, and model configuration. Custom agents are available via +# # the `task` tool alongside built-in types (general-purpose, bash). +# # custom_agents: +# # analysis: +# # description: "Data analysis specialist for processing datasets and generating insights" +# # system_prompt: | +# # You are a data analysis subagent. Focus on: +# # - Processing and analyzing datasets +# # - Generating visualizations +# # - Providing statistical insights +# # tools: # Tool whitelist (null = inherit all) +# # - bash +# # - read_file +# # - write_file +# # skills: # Skill whitelist (null = inherit all, [] = none) +# # - data-analysis +# # - visualization +# # model: inherit # 'inherit' uses parent's model +# # max_turns: 80 +# # timeout_seconds: 600 # # # Model override: by default, subagents inherit the lead agent's model. # # Set `model` to use a different model (e.g., a local Ollama model for cost savings). @@ -700,6 +747,19 @@ summarization: # The prompt should guide the model to extract important context summary_prompt: null + # Recently-loaded skill files are excluded from summarization so the agent + # does not lose skill instructions after a compression pass. Claude Code uses + # a similar strategy (keep the most recent ~5 skills, ~25k total tokens, with + # a ~5k cap per skill). Set preserve_recent_skill_count to 0 to disable. + preserve_recent_skill_count: 5 + preserve_recent_skill_tokens: 25000 + preserve_recent_skill_tokens_per_skill: 5000 + skill_file_read_tool_names: + - read_file + - read + - view + - cat + # ============================================================================ # Memory Configuration # ============================================================================ @@ -728,42 +788,83 @@ agents_api: # ============================================================================ # Allow the agent to autonomously create and improve skills in skills/custom/. 
skill_evolution: - enabled: false # Set to true to allow agent-managed writes under skills/custom - moderation_model_name: null # Model for LLM-based security scanning (null = use default model) + enabled: false # Set to true to allow agent-managed writes under skills/custom + moderation_model_name: null # Model for LLM-based security scanning (null = use default model) # ============================================================================ -# Checkpointer Configuration +# Checkpointer Configuration (DEPRECATED — use `database` instead) # ============================================================================ -# Configure state persistence for the embedded DeerFlowClient. -# The LangGraph Server manages its own state persistence separately -# via the server infrastructure (this setting does not affect it). +# Legacy standalone checkpointer config. Kept for backward compatibility. +# Prefer the unified `database` section below, which drives BOTH the +# LangGraph checkpointer AND DeerFlow application data (runs, feedback, +# events) from a single backend setting. # -# When configured, DeerFlowClient will automatically use this checkpointer, -# enabling multi-turn conversations to persist across process restarts. +# If both `checkpointer` and `database` are present, `checkpointer` +# takes precedence for LangGraph state persistence only. # -# Supported types: -# memory - In-process only. State is lost when the process exits. (default) -# sqlite - File-based SQLite persistence. Survives restarts. -# Requires: uv add langgraph-checkpoint-sqlite -# postgres - PostgreSQL persistence. Suitable for multi-process deployments. -# Requires: uv add langgraph-checkpoint-postgres psycopg[binary] psycopg-pool -# -# Examples: -# -# In-memory (default when omitted — no persistence): # checkpointer: -# type: memory +# type: sqlite +# connection_string: checkpoints.db # -# SQLite (file-based, single-process): -checkpointer: - type: sqlite - connection_string: checkpoints.db -# -# PostgreSQL (multi-process, production): # checkpointer: # type: postgres # connection_string: postgresql://user:password@localhost:5432/deerflow +# ============================================================================ +# Database +# ============================================================================ +# Unified storage backend for LangGraph checkpointer and DeerFlow +# application data (runs, threads metadata, feedback, etc.). +# +# backend: memory -- No persistence, data lost on restart +# backend: sqlite -- Single-node deployment, files in sqlite_dir +# backend: postgres -- Production multi-node deployment +# +# If this section is omitted or empty in config.yaml, DeerFlow uses: +# backend: sqlite +# sqlite_dir: .deer-flow/data +# +# SQLite mode uses a single deerflow.db file with WAL journal mode +# for both checkpointer and application data. +# +# Postgres mode: put your connection URL in .env as DATABASE_URL, +# then reference it here with $DATABASE_URL. +# Install the driver first: +# Local: uv sync --extra postgres +# Docker: UV_EXTRAS=postgres docker compose build +# +# NOTE: When both `checkpointer` and `database` are configured, +# `checkpointer` takes precedence for LangGraph state persistence. +# If you use `database`, you can remove the `checkpointer` section. 
+# database: +# backend: sqlite +# sqlite_dir: .deer-flow/data +# +# database: +# backend: postgres +# postgres_url: $DATABASE_URL +database: + backend: sqlite + sqlite_dir: .deer-flow/data + +# ============================================================================ +# Run Events Configuration +# ============================================================================ +# Storage backend for run events (messages + execution traces). +# +# backend: memory -- No persistence, data lost on restart (default) +# backend: db -- SQL database via ORM, full query capability (production) +# backend: jsonl -- Append-only JSONL files (lightweight single-node persistence) +# +# run_events: +# backend: memory +# max_trace_content: 10240 # Truncation threshold for trace content (db backend, bytes) +# track_token_usage: true # Accumulate token counts to RunRow +run_events: + backend: memory + max_trace_content: 10240 + track_token_usage: true + # ============================================================================ # IM Channels Configuration # ============================================================================ @@ -771,18 +872,18 @@ checkpointer: # All channels use outbound connections (WebSocket or polling) — no public IP required. # channels: -# # LangGraph Server URL for thread/message management (default: http://localhost:2024) +# # LangGraph-compatible Gateway API base URL for thread/message management (default: http://localhost:8001/api) # # For Docker deployments, use the Docker service name instead of localhost: -# # langgraph_url: http://langgraph:2024 +# # langgraph_url: http://gateway:8001/api # # gateway_url: http://gateway:8001 -# langgraph_url: http://localhost:2024 +# langgraph_url: http://localhost:8001/api # # Gateway API URL for auxiliary queries like /models, /memory (default: http://localhost:8001) # gateway_url: http://localhost:8001 # # # # Docker Compose note: # # If channels run inside the gateway container, use container DNS names instead # # of localhost, for example: -# # langgraph_url: http://langgraph:2024 +# # langgraph_url: http://gateway:8001/api # # gateway_url: http://gateway:8001 # # You can also set DEER_FLOW_CHANNELS_LANGGRAPH_URL / DEER_FLOW_CHANNELS_GATEWAY_URL. # @@ -807,7 +908,7 @@ checkpointer: # enabled: false # bot_token: $SLACK_BOT_TOKEN # xoxb-... # app_token: $SLACK_APP_TOKEN # xapp-... (Socket Mode) -# allowed_users: [] # empty = allow all +# allowed_users: [] # empty = allow all; can also be a single Slack user ID string, e.g. 
U123456, but list form is recommended # # telegram: # enabled: false @@ -863,6 +964,13 @@ checkpointer: # enabled: false # bot_id: $WECOM_BOT_ID # bot_secret: $WECOM_BOT_SECRET +# +# dingtalk: +# enabled: false +# client_id: $DINGTALK_CLIENT_ID +# client_secret: $DINGTALK_CLIENT_SECRET +# allowed_users: [] # empty = allow all +# card_template_id: "" # Optional: AI Card template ID for streaming updates # ============================================================================ # Guardrails Configuration diff --git a/deer-flow.code-workspace b/deer-flow.code-workspace index ef2863302..a4f4cb240 100644 --- a/deer-flow.code-workspace +++ b/deer-flow.code-workspace @@ -5,7 +5,7 @@ } ], "settings": { - "typescript.tsdk": "frontend/node_modules/typescript/lib", + "js/ts.tsdk.path": "frontend/node_modules/typescript/lib", "python-envs.pythonProjects": [ { "path": "backend", @@ -44,4 +44,4 @@ } ] } -} +} \ No newline at end of file diff --git a/docker/docker-compose-dev.yaml b/docker/docker-compose-dev.yaml index 87d19abbe..8fb95124d 100644 --- a/docker/docker-compose-dev.yaml +++ b/docker/docker-compose-dev.yaml @@ -4,8 +4,7 @@ # Services: # - nginx: Reverse proxy (port 2026) # - frontend: Frontend Next.js dev server (port 3000) -# - gateway: Backend Gateway API (port 8001) -# - langgraph: LangGraph server (port 2024) +# - gateway: Backend Gateway API + agent runtime (port 8001) # - provisioner (optional): Sandbox provisioner (creates Pods in host Kubernetes) # # Prerequisites: @@ -61,9 +60,7 @@ services: start_period: 15s # ── Reverse Proxy ────────────────────────────────────────────────────── - # Routes API traffic to gateway/langgraph and (optionally) provisioner. - # LANGGRAPH_UPSTREAM and LANGGRAPH_REWRITE control gateway vs standard - # routing (processed by envsubst at container start). + # Routes API traffic to gateway and (optionally) provisioner. nginx: image: nginx:alpine container_name: deer-flow-nginx @@ -71,16 +68,12 @@ services: - "2026:2026" volumes: - ./nginx/nginx.conf:/etc/nginx/nginx.conf.template:ro - environment: - - LANGGRAPH_UPSTREAM=${LANGGRAPH_UPSTREAM:-langgraph:2024} - - LANGGRAPH_REWRITE=${LANGGRAPH_REWRITE:-/} command: - sh - -c - | set -e - envsubst '$$LANGGRAPH_UPSTREAM $$LANGGRAPH_REWRITE' \ - < /etc/nginx/nginx.conf.template > /etc/nginx/nginx.conf + cp /etc/nginx/nginx.conf.template /etc/nginx/nginx.conf test -e /proc/net/if_inet6 || sed -i '/^[[:space:]]*listen[[:space:]]\+\[::\]:2026;/d' /etc/nginx/nginx.conf exec nginx -g 'daemon off;' depends_on: @@ -114,7 +107,6 @@ services: - WATCHPACK_POLLING=true - CI=true - DEER_FLOW_INTERNAL_GATEWAY_BASE_URL=http://gateway:8001 - - DEER_FLOW_INTERNAL_LANGGRAPH_BASE_URL=http://langgraph:2024 env_file: - ../frontend/.env networks: @@ -147,7 +139,7 @@ services: # On macOS/Docker Desktop, uv may fail to create symlinks inside shared # host directories, which causes startup-time `uv sync` to crash. - gateway-uv-cache:/root/.cache/uv - # DooD: same as gateway — AioSandboxProvider runs inside LangGraph process. + # DooD: AioSandboxProvider runs inside the Gateway process. 
- /var/run/docker.sock:/var/run/docker.sock # CLI auth directories for auto-auth (Claude Code + Codex CLI) - type: bind @@ -166,7 +158,7 @@ services: environment: - CI=true - DEER_FLOW_HOME=/app/backend/.deer-flow - - DEER_FLOW_CHANNELS_LANGGRAPH_URL=${DEER_FLOW_CHANNELS_LANGGRAPH_URL:-http://langgraph:2024} + - DEER_FLOW_CHANNELS_LANGGRAPH_URL=${DEER_FLOW_CHANNELS_LANGGRAPH_URL:-http://gateway:8001/api} - DEER_FLOW_CHANNELS_GATEWAY_URL=${DEER_FLOW_CHANNELS_GATEWAY_URL:-http://gateway:8001} - DEER_FLOW_HOST_BASE_DIR=${DEER_FLOW_ROOT}/backend/.deer-flow - DEER_FLOW_HOST_SKILLS_PATH=${DEER_FLOW_ROOT}/skills @@ -180,70 +172,11 @@ services: - deer-flow-dev restart: unless-stopped - # Backend - LangGraph Server - langgraph: - build: - context: ../ - dockerfile: backend/Dockerfile - target: dev - # cache_from disabled - requires manual setup: mkdir -p /tmp/docker-cache-langgraph - args: - APT_MIRROR: ${APT_MIRROR:-} - UV_IMAGE: ${UV_IMAGE:-ghcr.io/astral-sh/uv:0.7.20} - UV_INDEX_URL: ${UV_INDEX_URL:-https://pypi.org/simple} - container_name: deer-flow-langgraph - command: sh -c "cd backend && { (uv sync || (echo '[startup] uv sync failed; recreating .venv and retrying once' && uv venv --allow-existing .venv && uv sync)) && allow_blocking='' && if [ \"\${LANGGRAPH_ALLOW_BLOCKING:-0}\" = '1' ]; then allow_blocking='--allow-blocking'; fi && uv run langgraph dev --no-browser \${allow_blocking} --host 0.0.0.0 --port 2024 --n-jobs-per-worker \${LANGGRAPH_JOBS_PER_WORKER:-10}; } > /app/logs/langgraph.log 2>&1" - volumes: - - ../backend/:/app/backend/ - # Preserve the .venv built during Docker image build — mounting the full backend/ - # directory above would otherwise shadow it with the (empty) host directory. - - langgraph-venv:/app/backend/.venv - - ../config.yaml:/app/config.yaml - - ../extensions_config.json:/app/extensions_config.json - - ../skills:/app/skills - - ../logs:/app/logs - # Use a Docker-managed uv cache volume instead of a host bind mount. - # On macOS/Docker Desktop, uv may fail to create symlinks inside shared - # host directories, which causes startup-time `uv sync` to crash. - - langgraph-uv-cache:/root/.cache/uv - # DooD: same as gateway — AioSandboxProvider runs inside LangGraph process. - - /var/run/docker.sock:/var/run/docker.sock - # CLI auth directories for auto-auth (Claude Code + Codex CLI) - - type: bind - source: ${HOME:?HOME must be set}/.claude - target: /root/.claude - read_only: true - bind: - create_host_path: true - - type: bind - source: ${HOME:?HOME must be set}/.codex - target: /root/.codex - read_only: true - bind: - create_host_path: true - working_dir: /app - environment: - - CI=true - - DEER_FLOW_HOME=/app/backend/.deer-flow - - DEER_FLOW_HOST_BASE_DIR=${DEER_FLOW_ROOT}/backend/.deer-flow - - DEER_FLOW_HOST_SKILLS_PATH=${DEER_FLOW_ROOT}/skills - - DEER_FLOW_SANDBOX_HOST=host.docker.internal - env_file: - - ../.env - extra_hosts: - # For Linux: map host.docker.internal to host gateway - - "host.docker.internal:host-gateway" - networks: - - deer-flow-dev - restart: unless-stopped - volumes: # Persist .venv across container restarts so dependencies installed during # image build are not shadowed by the host backend/ directory mount. 
gateway-venv: - langgraph-venv: gateway-uv-cache: - langgraph-uv-cache: networks: deer-flow-dev: diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 38337c7df..82cb62425 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -4,8 +4,7 @@ # Services: # - nginx: Reverse proxy (port 2026, configurable via PORT env var) # - frontend: Next.js production server -# - gateway: FastAPI Gateway API -# - langgraph: LangGraph production server (Dockerfile generated by langgraph dockerfile) +# - gateway: FastAPI Gateway API + agent runtime # - provisioner: (optional) Sandbox provisioner for Kubernetes mode # # Key environment variables (set via environment/.env or scripts/deploy.sh): @@ -30,12 +29,8 @@ services: - "${PORT:-2026}:2026" volumes: - ./nginx/nginx.conf:/etc/nginx/nginx.conf.template:ro - environment: - - LANGGRAPH_UPSTREAM=${LANGGRAPH_UPSTREAM:-langgraph:2024} - - LANGGRAPH_REWRITE=${LANGGRAPH_REWRITE:-/} command: > - sh -c "envsubst '$$LANGGRAPH_UPSTREAM $$LANGGRAPH_REWRITE' - < /etc/nginx/nginx.conf.template > /etc/nginx/nginx.conf + sh -c "cp /etc/nginx/nginx.conf.template /etc/nginx/nginx.conf && nginx -g 'daemon off;'" depends_on: - frontend @@ -57,7 +52,6 @@ services: environment: - BETTER_AUTH_SECRET=${BETTER_AUTH_SECRET} - DEER_FLOW_INTERNAL_GATEWAY_BASE_URL=http://gateway:8001 - - DEER_FLOW_INTERNAL_LANGGRAPH_BASE_URL=http://langgraph:2024 env_file: - ../frontend/.env networks: @@ -73,6 +67,7 @@ services: APT_MIRROR: ${APT_MIRROR:-} UV_IMAGE: ${UV_IMAGE:-ghcr.io/astral-sh/uv:0.7.20} UV_INDEX_URL: ${UV_INDEX_URL:-https://pypi.org/simple} + UV_EXTRAS: ${UV_EXTRAS:-} container_name: deer-flow-gateway command: sh -c "cd backend && PYTHONPATH=. uv run uvicorn app.gateway.app:app --host 0.0.0.0 --port 8001 --workers ${GATEWAY_WORKERS:-4}" volumes: @@ -101,7 +96,7 @@ services: - DEER_FLOW_HOME=/app/backend/.deer-flow - DEER_FLOW_CONFIG_PATH=/app/backend/config.yaml - DEER_FLOW_EXTENSIONS_CONFIG_PATH=/app/backend/extensions_config.json - - DEER_FLOW_CHANNELS_LANGGRAPH_URL=${DEER_FLOW_CHANNELS_LANGGRAPH_URL:-http://langgraph:2024} + - DEER_FLOW_CHANNELS_LANGGRAPH_URL=${DEER_FLOW_CHANNELS_LANGGRAPH_URL:-http://gateway:8001/api} - DEER_FLOW_CHANNELS_GATEWAY_URL=${DEER_FLOW_CHANNELS_GATEWAY_URL:-http://gateway:8001} # DooD path/network translation - DEER_FLOW_HOST_BASE_DIR=${DEER_FLOW_HOME} @@ -115,57 +110,6 @@ services: - deer-flow restart: unless-stopped - # ── LangGraph Server ─────────────────────────────────────────────────────── - # TODO: switch to langchain/langgraph-api (licensed) once a license key is available. - # For now, use `langgraph dev` (no license required) with the standard backend image. 
- langgraph: - build: - context: ../ - dockerfile: backend/Dockerfile - args: - APT_MIRROR: ${APT_MIRROR:-} - UV_IMAGE: ${UV_IMAGE:-ghcr.io/astral-sh/uv:0.7.20} - UV_INDEX_URL: ${UV_INDEX_URL:-https://pypi.org/simple} - container_name: deer-flow-langgraph - command: sh -c 'cd /app/backend && args="--no-browser --no-reload --host 0.0.0.0 --port 2024 --n-jobs-per-worker $${LANGGRAPH_JOBS_PER_WORKER:-10}" && if [ "$${LANGGRAPH_ALLOW_BLOCKING:-0}" = "1" ]; then args="$$args --allow-blocking"; fi && uv run langgraph dev $$args' - volumes: - - ${DEER_FLOW_CONFIG_PATH}:/app/backend/config.yaml:ro - - ${DEER_FLOW_EXTENSIONS_CONFIG_PATH}:/app/backend/extensions_config.json:ro - - ${DEER_FLOW_HOME}:/app/backend/.deer-flow - - ../skills:/app/skills:ro - - ../backend/.langgraph_api:/app/backend/.langgraph_api - # DooD: same as gateway - - ${DEER_FLOW_DOCKER_SOCKET}:/var/run/docker.sock - # CLI auth directories for auto-auth (Claude Code + Codex CLI) - - type: bind - source: ${HOME:?HOME must be set}/.claude - target: /root/.claude - read_only: true - bind: - create_host_path: true - - type: bind - source: ${HOME:?HOME must be set}/.codex - target: /root/.codex - read_only: true - bind: - create_host_path: true - environment: - - CI=true - - DEER_FLOW_HOME=/app/backend/.deer-flow - - DEER_FLOW_CONFIG_PATH=/app/backend/config.yaml - - DEER_FLOW_EXTENSIONS_CONFIG_PATH=/app/backend/extensions_config.json - - DEER_FLOW_HOST_BASE_DIR=${DEER_FLOW_HOME} - - DEER_FLOW_HOST_SKILLS_PATH=${DEER_FLOW_REPO_ROOT}/skills - - DEER_FLOW_SANDBOX_HOST=host.docker.internal - # LangSmith tracing: set LANGSMITH_TRACING=true and LANGSMITH_API_KEY in .env to enable. - env_file: - - ../.env - extra_hosts: - - "host.docker.internal:host-gateway" - networks: - - deer-flow - restart: unless-stopped - # ── Sandbox Provisioner (optional, Kubernetes mode) ──────────────────────── provisioner: build: diff --git a/docker/nginx/nginx.conf b/docker/nginx/nginx.conf index c9a7be32b..a5e0eb6b6 100644 --- a/docker/nginx/nginx.conf +++ b/docker/nginx/nginx.conf @@ -26,10 +26,6 @@ http { server gateway:8001; } - upstream langgraph { - server ${LANGGRAPH_UPSTREAM}; - } - upstream frontend { server frontend:3000; } @@ -56,13 +52,11 @@ http { return 204; } - # LangGraph API routes - # In standard mode: /api/langgraph/* → langgraph:2024 (rewrite to /*) - # In gateway mode: /api/langgraph/* → gateway:8001 (rewrite to /api/*) - # Controlled by LANGGRAPH_UPSTREAM and LANGGRAPH_REWRITE env vars. + # LangGraph-compatible API routes served by Gateway. + # Rewrites /api/langgraph/* to /api/* before proxying to Gateway. location /api/langgraph/ { - rewrite ^/api/langgraph/(.*) ${LANGGRAPH_REWRITE}$1 break; - proxy_pass http://langgraph; + rewrite ^/api/langgraph/(.*) /api/$1 break; + proxy_pass http://gateway; proxy_http_version 1.1; # Headers @@ -213,6 +207,17 @@ http { proxy_set_header X-Forwarded-Proto $scheme; } + # Catch-all for /api/ routes not covered above (e.g. /api/v1/auth/*). + # More specific prefix and regex locations above still take precedence. 
+ location /api/ { + proxy_pass http://gateway; + proxy_http_version 1.1; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + # All other requests go to frontend location / { proxy_pass http://frontend; diff --git a/docker/nginx/nginx.local.conf b/docker/nginx/nginx.local.conf index e79508831..473d3f1d1 100644 --- a/docker/nginx/nginx.local.conf +++ b/docker/nginx/nginx.local.conf @@ -19,10 +19,6 @@ http { server 127.0.0.1:8001; } - upstream langgraph { - server 127.0.0.1:2024; - } - upstream frontend { server 127.0.0.1:3000; } @@ -48,38 +44,10 @@ http { return 204; } - # LangGraph API routes (served by langgraph dev) - # Rewrites /api/langgraph/* to /* before proxying to LangGraph server + # LangGraph-compatible API routes served by Gateway. + # Rewrites /api/langgraph/* to /api/* before proxying to Gateway. location /api/langgraph/ { - rewrite ^/api/langgraph/(.*) /$1 break; - proxy_pass http://langgraph; - proxy_http_version 1.1; - - # Headers - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - proxy_set_header Connection ''; - - # SSE/Streaming support - proxy_buffering off; - proxy_cache off; - proxy_set_header X-Accel-Buffering no; - - # Timeouts for long-running requests - proxy_connect_timeout 600s; - proxy_send_timeout 600s; - proxy_read_timeout 600s; - - # Chunked transfer encoding - chunked_transfer_encoding on; - } - - # Experimental: Gateway-backed LangGraph-compatible API - # Frontend can opt-in via NEXT_PUBLIC_LANGGRAPH_BASE_URL=/api/langgraph-compat - location /api/langgraph-compat/ { - rewrite ^/api/langgraph-compat/(.*) /api/$1 break; + rewrite ^/api/langgraph/(.*) /api/$1 break; proxy_pass http://gateway; proxy_http_version 1.1; @@ -218,6 +186,25 @@ http { proxy_set_header X-Forwarded-Proto $scheme; } + # Catch-all for any /api/* prefix not matched by a more specific block above. + # Covers the auth module (/api/v1/auth/login, /me, /change-password, ...), + # plus feedback / runs / token-usage routes that 2.0-rc added without + # updating this nginx config. Longest-prefix matching ensures the explicit + # blocks above (/api/models, /api/threads regex, /api/langgraph/, ...) still + # win for their paths — only truly unmatched /api/* requests land here. + location /api/ { + proxy_pass http://gateway; + proxy_http_version 1.1; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + + # Auth endpoints set HttpOnly cookies — make sure nginx doesn't + # strip the Set-Cookie header from upstream responses. + proxy_pass_header Set-Cookie; + } + # All other requests go to frontend location / { proxy_pass http://frontend; @@ -232,6 +219,23 @@ http { proxy_set_header Connection 'upgrade'; proxy_cache_bypass $http_upgrade; + # Disable response buffering for the frontend. Without this, + # nginx tries to spool large upstream responses (e.g. Next.js + # static chunks) into ``proxy_temp_path``, which defaults to + # the system-owned ``/var/lib/nginx/proxy`` and fails with + # ``[crit] open() ... failed (13: Permission denied)`` when + # nginx is launched as a non-root user (every dev machine + # except production root containers). 
The symptom on the + # client side is ``ERR_INCOMPLETE_CHUNKED_ENCODING`` and + # ``ChunkLoadError`` partway through page hydration. + # + # Streaming the response straight through avoids the + # temp-file path entirely. The frontend already sets its + # own cache headers, so we don't lose anything from + # disabling nginx-side buffering. + proxy_buffering off; + proxy_request_buffering off; + # Timeouts proxy_connect_timeout 600s; proxy_send_timeout 600s; diff --git a/docs/superpowers/plans/2026-04-10-event-store-history.md b/docs/superpowers/plans/2026-04-10-event-store-history.md new file mode 100644 index 000000000..0e3eb1c35 --- /dev/null +++ b/docs/superpowers/plans/2026-04-10-event-store-history.md @@ -0,0 +1,471 @@ +# Event Store History — Backend Compatibility Layer + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Replace checkpoint state with the append-only event store as the message source in the thread state/history endpoints, so summarization never causes message loss. + +**Architecture:** The Gateway's `get_thread_state` and `get_thread_history` endpoints currently read messages from `checkpoint.channel_values["messages"]`. After summarization, those messages are replaced with a synthetic summary-as-human message and all pre-summarize messages are gone. We modify these endpoints to read messages from the RunEventStore instead (append-only, unaffected by summarization). The response shape for each message stays identical so the chat render path needs no changes, but the frontend's feedback hook must be aligned to use the same full-history view (see Task 4). + +**Tech Stack:** Python (FastAPI, SQLAlchemy), pytest, TypeScript (React Query) + +**Scope:** Gateway mode only (`make dev-pro`). Standard mode uses the LangGraph Server directly and does not go through these endpoints; the summarize bug is still present there and must be tracked as a separate follow-up (see §"Follow-ups" at end of plan). + +**Prerequisite already landed:** `backend/packages/harness/deerflow/runtime/journal.py` now unwraps `Command(update={'messages':[ToolMessage(...)]})` in `on_tool_end`, so new runs that use state-updating tools (e.g. `present_files`) write the inner `ToolMessage` content to the event store instead of `str(Command(...))`. Legacy data captured before this fix is cleaned up defensively by the new helper (see Task 1 Step 3 `_sanitize_legacy_command_repr`). + +--- + +## Real Data Alignment Analysis + +Compared real `POST /history` response (checkpoint-based) with `run_events` table for thread `6d30913e-dcd4-41c8-8941-f66c716cf359` (docs/resp.json + backend/.deer-flow/data/deerflow.db). See `docs/superpowers/specs/2026-04-11-runjournal-history-evaluation.md` for full evidence chain. 
+ +| Message type | Fields compared | Difference | +|-------------|----------------|------------| +| human_message | all fields | `id` is `None` in event store, has UUID in checkpoint | +| ai_message (tool_call) | all fields, 6 overlapping | **IDENTICAL** (0 diffs) | +| ai_message (final) | all fields | **IDENTICAL** | +| tool_result (normal) | all fields | Only `id` differs (`None` vs UUID) | +| tool_result (from `Command`-returning tool) | content | **Legacy data stored `str(Command(...))` repr instead of inner ToolMessage** — fixed in journal.py for new runs; legacy rows sanitized by helper | + +**Root cause for id difference:** LangGraph's checkpoint assigns `id` to HumanMessage and ToolMessage during graph execution. Event store writes happen earlier, when those ids are still None. AI messages receive `id` from the LLM response (`lc_run--*`) and are unaffected. + +**Fix for id:** Generate deterministic UUIDs for `id=None` messages using `uuid5(NAMESPACE_URL, f"{thread_id}:{seq}")` at read time. Patch a **copy** of the content dict, never the live store object. + +**Summarize impact quantified on the reproducer thread**: event_store has 16 messages (7 AI + 9 others); checkpoint has 12 after summarize (5 AI + 7 others). AI id overlap: 5 of 7 — the 2 missing AI messages are pre-summarize. + +--- + +## File Structure + +| File | Action | Responsibility | +|------|--------|----------------| +| `backend/app/gateway/routers/threads.py` | Modify | Replace checkpoint messages with event store messages in `get_thread_state` and `get_thread_history` | +| `backend/tests/test_thread_state_event_store.py` | Create | Tests for the modified endpoints | + +--- + +### Task 1: Add `_get_event_store_messages` helper to `threads.py` + +A shared helper that loads the **full** message stream from the event store, patches `id=None` messages with deterministic UUIDs, and defensively sanitizes legacy `Command(update=...)` reprs captured before the journal.py fix. Patches a copy of each content dict so the live store is never mutated. + +**Design constraints (derived from evaluation §3, §4, §5):** +- **Full pagination**, not `limit=1000`. `RunEventStore.list_messages` returns "latest N records" — a fixed limit silently truncates older messages. Use `count_messages()` to size the request or loop with `after_seq` cursors. +- **Copy before mutate**. `MemoryRunEventStore` returns live dict references; the JSONL/DB stores may return detached rows but we must not rely on that. Always `content = dict(evt["content"])` before patching `id`. +- **Legacy Command sanitization.** Legacy data contains `content["content"] == "Command(update={'artifacts': [...], 'messages': [ToolMessage(content='X', ...)]})"`. Regex-extract the inner ToolMessage content string and replace; if extraction fails, leave content as-is (still strictly better than nothing because checkpoint fallback is also wrong for summarized threads). +- **User context.** `DbRunEventStore.list_messages` is user-scoped via `resolve_user_id(AUTO)` and relies on the auth contextvar set by `@require_permission`. Both endpoints are already decorated — document this dependency in the helper docstring. 
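+
+The determinism and copy-on-read constraints are easy to sanity-check in isolation. A minimal sketch (the thread id and seq are made-up values, not taken from the reproducer thread):
+
+```python
+import uuid
+
+# Deterministic: the same thread_id:seq input always yields the same UUID,
+# so repeated /history calls hand the frontend stable message ids.
+assert uuid.uuid5(uuid.NAMESPACE_URL, "t1:3") == uuid.uuid5(uuid.NAMESPACE_URL, "t1:3")
+
+# Copy before patching: the store-owned dict must keep id=None.
+stored = {"type": "human", "id": None, "content": "hi"}  # stands in for a live store reference
+patched = dict(stored)  # shallow copy is enough; only the top-level id is written
+patched["id"] = str(uuid.uuid5(uuid.NAMESPACE_URL, "t1:3"))
+assert stored["id"] is None and patched["id"] is not None
+```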
+ +**Files:** +- Modify: `backend/app/gateway/routers/threads.py` +- Test: `backend/tests/test_thread_state_event_store.py` + +- [ ] **Step 1: Write the test** + +Create `backend/tests/test_thread_state_event_store.py`: + +```python +"""Tests for event-store-backed message loading in thread state/history endpoints.""" + +from __future__ import annotations + +import uuid + +import pytest + +from deerflow.runtime.events.store.memory import MemoryRunEventStore + + +@pytest.fixture() +def event_store(): + return MemoryRunEventStore() + + +async def _seed_conversation(event_store: MemoryRunEventStore, thread_id: str = "t1"): + """Seed a realistic multi-turn conversation matching real checkpoint format.""" + # human_message: id is None (same as real data) + await event_store.put( + thread_id=thread_id, run_id="r1", + event_type="human_message", category="message", + content={ + "type": "human", "id": None, + "content": [{"type": "text", "text": "Hello"}], + "additional_kwargs": {}, "response_metadata": {}, "name": None, + }, + ) + # ai_tool_call: id is set by LLM + await event_store.put( + thread_id=thread_id, run_id="r1", + event_type="ai_tool_call", category="message", + content={ + "type": "ai", "id": "lc_run--abc123", + "content": "", + "tool_calls": [{"name": "search", "args": {"q": "cats"}, "id": "call_1", "type": "tool_call"}], + "invalid_tool_calls": [], + "additional_kwargs": {}, "response_metadata": {}, "name": None, + "usage_metadata": {"input_tokens": 100, "output_tokens": 50, "total_tokens": 150}, + }, + ) + # tool_result: id is None (same as real data) + await event_store.put( + thread_id=thread_id, run_id="r1", + event_type="tool_result", category="message", + content={ + "type": "tool", "id": None, + "content": "Found 10 results", + "tool_call_id": "call_1", "name": "search", + "artifact": None, "status": "success", + "additional_kwargs": {}, "response_metadata": {}, + }, + ) + # ai_message: id is set by LLM + await event_store.put( + thread_id=thread_id, run_id="r1", + event_type="ai_message", category="message", + content={ + "type": "ai", "id": "lc_run--def456", + "content": "I found 10 results about cats.", + "tool_calls": [], "invalid_tool_calls": [], + "additional_kwargs": {}, "response_metadata": {"finish_reason": "stop"}, "name": None, + "usage_metadata": {"input_tokens": 200, "output_tokens": 100, "total_tokens": 300}, + }, + ) + # Also add a trace event — should NOT appear + await event_store.put( + thread_id=thread_id, run_id="r1", + event_type="llm_request", category="trace", + content={"model": "gpt-4"}, + ) + + +class TestGetEventStoreMessages: + """Verify event store message extraction with id patching.""" + + @pytest.mark.asyncio + async def test_extracts_all_message_types(self, event_store): + await _seed_conversation(event_store) + events = await event_store.list_messages("t1", limit=500) + messages = [evt["content"] for evt in events if isinstance(evt.get("content"), dict) and "type" in evt["content"]] + assert len(messages) == 4 + assert [m["type"] for m in messages] == ["human", "ai", "tool", "ai"] + + @pytest.mark.asyncio + async def test_null_ids_get_patched(self, event_store): + """Messages with id=None should get deterministic UUIDs.""" + await _seed_conversation(event_store) + events = await event_store.list_messages("t1", limit=500) + messages = [] + for evt in events: + content = evt.get("content") + if isinstance(content, dict) and "type" in content: + if content.get("id") is None: + content["id"] = str(uuid.uuid5(uuid.NAMESPACE_URL, 
f"t1:{evt['seq']}")) + messages.append(content) + + # All messages now have an id + for m in messages: + assert m["id"] is not None + assert isinstance(m["id"], str) + assert len(m["id"]) > 0 + + # AI messages keep their original id + assert messages[1]["id"] == "lc_run--abc123" + assert messages[3]["id"] == "lc_run--def456" + + # Human and tool messages get deterministic ids (same input = same output) + human_id_1 = str(uuid.uuid5(uuid.NAMESPACE_URL, "t1:1")) + assert messages[0]["id"] == human_id_1 + + @pytest.mark.asyncio + async def test_empty_thread(self, event_store): + events = await event_store.list_messages("nonexistent", limit=500) + messages = [evt["content"] for evt in events if isinstance(evt.get("content"), dict)] + assert messages == [] + + @pytest.mark.asyncio + async def test_tool_call_fields_preserved(self, event_store): + await _seed_conversation(event_store) + events = await event_store.list_messages("t1", limit=500) + messages = [evt["content"] for evt in events if isinstance(evt.get("content"), dict) and "type" in evt["content"]] + + # AI tool_call message + ai_tc = messages[1] + assert ai_tc["tool_calls"][0]["name"] == "search" + assert ai_tc["tool_calls"][0]["id"] == "call_1" + + # Tool result + tool = messages[2] + assert tool["tool_call_id"] == "call_1" + assert tool["status"] == "success" +``` + +- [ ] **Step 2: Run tests to verify they pass** + +Run: `cd backend && PYTHONPATH=. uv run pytest tests/test_thread_state_event_store.py -v` + +- [ ] **Step 3: Add the helper function and modify `get_thread_history`** + +In `backend/app/gateway/routers/threads.py`: + +1. Add import at the top: +```python +import uuid # ADD (may already exist, check first) +from app.gateway.deps import get_run_event_store # ADD +``` + +2. Add the helper function (before the endpoint functions, after the model definitions): + +```python +_LEGACY_CMD_INNER_CONTENT_RE = re.compile( + r"ToolMessage\(content=(?P['\"])(?P.*?)(?P=q)", + re.DOTALL, +) + + +def _sanitize_legacy_command_repr(content_field: Any) -> Any: + """Recover the inner ToolMessage text from a legacy ``str(Command(...))`` repr. + + Runs that pre-date the ``on_tool_end`` fix in ``journal.py`` stored + ``str(Command(update={'messages':[ToolMessage(content='X', ...)]}))`` as the + tool_result content. New runs store ``'X'`` directly. For old threads, try + to extract ``'X'`` defensively; return the original string if extraction + fails (still no worse than the current checkpoint-based fallback, which is + broken for summarized threads anyway). + """ + if not isinstance(content_field, str) or not content_field.startswith("Command(update="): + return content_field + match = _LEGACY_CMD_INNER_CONTENT_RE.search(content_field) + return match.group("inner") if match else content_field + + +async def _get_event_store_messages(request: Request, thread_id: str) -> list[dict] | None: + """Load messages from the event store, returning None if unavailable. + + The event store is append-only and immune to summarization. Each + message event's ``content`` field contains a ``model_dump()``'d + LangChain Message dict that is already JSON-serialisable. + + **Full pagination, not a fixed limit.** ``RunEventStore.list_messages`` + returns the newest ``limit`` records when no cursor is given, which + silently drops older messages. We call ``count_messages()`` first and + request that many records. For stores that may return fewer (e.g. filtered + by user), we also fall back to ``after_seq``-cursor pagination. 
+
+
+async def _get_event_store_messages(request: Request, thread_id: str) -> list[dict] | None:
+    """Load messages from the event store, returning None if unavailable.
+
+    The event store is append-only and immune to summarization. Each
+    message event's ``content`` field contains a ``model_dump()``'d
+    LangChain Message dict that is already JSON-serialisable.
+
+    **Full pagination, not a fixed limit.** ``RunEventStore.list_messages``
+    returns the newest ``limit`` records when no cursor is given, which
+    silently drops older messages. We call ``count_messages()`` first and
+    request that many records. For stores that may return fewer (e.g. filtered
+    by user), we also fall back to ``after_seq``-cursor pagination.
+
+    **Copy-on-read.** Each content dict is copied before ``id`` is patched so
+    the live store object is never mutated; ``MemoryRunEventStore`` returns
+    live references.
+
+    **Legacy Command repr sanitization.** See ``_sanitize_legacy_command_repr``.
+
+    **User context.** ``DbRunEventStore`` is user-scoped by default via
+    ``resolve_user_id(AUTO)`` (see ``runtime/user_context.py``). Callers of
+    this helper must be inside a request where ``@require_permission`` has
+    populated the user contextvar. Both ``get_thread_history`` and
+    ``get_thread_state`` satisfy that. Do not call this helper from CLI or
+    migration scripts without passing ``user_id=None`` explicitly.
+
+    Returns ``None`` when the event store is not configured or contains no
+    messages for this thread, so callers can fall back to checkpoint messages.
+    """
+    try:
+        event_store = get_run_event_store(request)
+    except Exception:
+        return None
+
+    try:
+        total = await event_store.count_messages(thread_id)
+    except Exception:
+        logger.exception("count_messages failed for thread %s", sanitize_log_param(thread_id))
+        return None
+    if not total:
+        return None
+
+    # Size the first request from count_messages(): a cursorless call returns
+    # the *newest* N records, so starting with a fixed page size would silently
+    # drop the oldest messages on threads longer than one page.
+    collected: list[dict] = list(await event_store.list_messages(thread_id, limit=total))
+    # Defensive cursor pagination for stores that cap the page size or for
+    # rows written after count_messages() ran.
+    page_size = 500
+    while collected:
+        after_seq = collected[-1].get("seq")
+        if after_seq is None:
+            break
+        page = await event_store.list_messages(thread_id, limit=page_size, after_seq=after_seq)
+        if not page:
+            break
+        collected.extend(page)
+
+    messages: list[dict] = []
+    for evt in collected:
+        raw = evt.get("content")
+        if not isinstance(raw, dict) or "type" not in raw:
+            continue
+        # Copy to avoid mutating the store-owned dict.
+        content = dict(raw)
+        if content.get("id") is None:
+            content["id"] = str(uuid.uuid5(uuid.NAMESPACE_URL, f"{thread_id}:{evt['seq']}"))
+        # Sanitize legacy Command reprs on tool_result messages only.
+        if content.get("type") == "tool":
+            content["content"] = _sanitize_legacy_command_repr(content.get("content"))
+        messages.append(content)
+    return messages if messages else None
+```
+
+Also add `import re` at the top of the file if it isn't already imported.
+
+3. In `get_thread_history` (around line 585-590), replace the messages section:
+
+**Before:**
+```python
+    # Attach messages from checkpointer only for the latest checkpoint
+    if is_latest_checkpoint:
+        messages = channel_values.get("messages")
+        if messages:
+            values["messages"] = serialize_channel_values({"messages": messages}).get("messages", [])
+        is_latest_checkpoint = False
+```
+
+**After:**
+```python
+    # Attach messages: prefer event store (immune to summarization),
+    # fall back to checkpoint messages when event store is unavailable.
+ if is_latest_checkpoint: + es_messages = await _get_event_store_messages(request, thread_id) + if es_messages is not None: + values["messages"] = es_messages + else: + messages = channel_values.get("messages") + if messages: + values["messages"] = serialize_channel_values({"messages": messages}).get("messages", []) + is_latest_checkpoint = False +``` + +- [ ] **Step 4: Modify `get_thread_state` similarly** + +In `get_thread_state` (around line 443-444), replace: + +**Before:** +```python + return ThreadStateResponse( + values=serialize_channel_values(channel_values), +``` + +**After:** +```python + values = serialize_channel_values(channel_values) + + # Override messages with event store data (immune to summarization) + es_messages = await _get_event_store_messages(request, thread_id) + if es_messages is not None: + values["messages"] = es_messages + + return ThreadStateResponse( + values=values, +``` + +- [ ] **Step 5: Run all backend tests** + +Run: `cd backend && PYTHONPATH=. uv run pytest tests/ -v --timeout=30 -x` + +- [ ] **Step 6: Commit** + +```bash +git add backend/app/gateway/routers/threads.py backend/tests/test_thread_state_event_store.py +git commit -m "feat(threads): load messages from event store instead of checkpoint state + +Event store is append-only and immune to summarization. Messages with +null ids (human, tool) get deterministic UUIDs based on thread_id:seq +for stable frontend rendering." +``` + +--- + +### Task 2 (OPTIONAL, deferred): Reduce flush_threshold for shorter mid-stream gap + +**Status:** Not a correctness fix. Re-evaluation (see spec) found that `RunJournal` already flushes on `run_end`, `run_error`, cancel, and worker `finally` paths. The only window this tuning narrows is a hard process crash or mid-run reload. Defer and decide separately; do not couple with Task 1 merge. + +If pursued: change `flush_threshold` default from 20 → 5 in `journal.py:42`, rerun `tests/test_run_journal.py`, commit as a separate `perf(journal): …` commit. + +--- + +### Task 3: Fix `useThreadFeedback` pagination in frontend + +Once `/history` returns the full event-store-backed message stream, the frontend's `runIdByAiIndex` map must also cover the full stream or its positional AI-index mapping drifts and feedback clicks go to the wrong `run_id`. The current hook hardcodes `limit=200`. + +**Files:** +- Modify: `frontend/src/core/threads/hooks.ts` (around line 679) + +- [ ] **Step 1: Replace the fixed `?limit=200` with full pagination** + +Change: + +```ts +const res = await fetchWithAuth( + `${getBackendBaseURL()}/api/threads/${encodeURIComponent(threadId)}/messages?limit=200`, +); +``` + +to a loop that pages via `after_seq` (or an equivalent query param exposed by the `/messages` endpoint — check `backend/app/gateway/routers/thread_runs.py:285-323` for the actual parameter names before writing the TS code). Accumulate `messages` until a page returns fewer than the page size. + +- [ ] **Step 2: Defensive index guard** + +`runIdByAiIndex[aiMessageIndex]` can still be `undefined` when the frontend renders optimistic state before the messages query refreshes. The current `?? undefined` in `message-list.tsx:71` already handles this; do not remove it. + +- [ ] **Step 3: Invalidate `["thread-feedback", threadId]` after a new run** + +In `useThreadStream` (or wherever stream-end is handled), call `queryClient.invalidateQueries({ queryKey: ["thread-feedback", threadId] })` when the stream closes so the runIdByAiIndex picks up the new run's AI message immediately. 
+
+- [ ] **Step 4: Run `pnpm check`**
+
+```bash
+cd frontend && pnpm check
+```
+
+- [ ] **Step 5: Commit**
+
+```bash
+git add frontend/src/core/threads/hooks.ts
+git commit -m "fix(feedback): paginate useThreadFeedback and invalidate after stream"
+```
+
+---
+
+### Task 4: End-to-end test — summarize + multi-run feedback
+
+Add a regression test that exercises the exact bug class we are fixing: a summarized thread with at least two runs, where feedback clicks must target the correct `run_id`.
+
+**Files:**
+- Modify: `backend/tests/test_thread_state_event_store.py`
+
+- [ ] **Step 1: Write the test**
+
+Seed a `MemoryRunEventStore` with two runs worth of messages (`r1`: human + ai + human + ai, `r2`: human + ai), then simulate a summarized checkpoint state that drops the `r1` messages. Call `_get_event_store_messages` and assert (one possible shape is sketched below):
+
+- Length matches the event store, not the checkpoint
+- The first message is the original `r1` human, not a summary
+- AI messages preserve their `lc_run--*` ids in order
+- Any `id=None` messages get a stable `uuid5(...)` id
+- A legacy `str(Command(update=...))` content field in a tool_result is sanitized to the inner text
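+
+One possible shape for the seeding and assertions, in the same inline style as the Task 1 tests (illustrative only: the run ids, texts, and message counts are invented, and the real test should go through `_get_event_store_messages` with a stubbed `Request` plus a summarized-checkpoint double rather than re-implementing the read side):
+
+```python
+@pytest.mark.asyncio
+async def test_summarized_thread_keeps_all_runs(event_store):
+    # Two runs: r1 has two exchanges, r2 has one. A summarized checkpoint
+    # would keep only the r2 tail; the event store must return all six.
+    turns = [
+        ("r1", "human", None, "What changed?"),
+        ("r1", "ai", "lc_run--a1", "Initial answer."),
+        ("r1", "human", None, "And after that?"),
+        ("r1", "ai", "lc_run--a2", "Follow-up answer."),
+        ("r2", "human", None, "Anything else?"),
+        ("r2", "ai", "lc_run--b1", "No, that is all."),
+    ]
+    for run_id, mtype, mid, text in turns:
+        await event_store.put(
+            thread_id="t1", run_id=run_id,
+            event_type=f"{mtype}_message", category="message",
+            content={"type": mtype, "id": mid, "content": text,
+                     "additional_kwargs": {}, "response_metadata": {}, "name": None},
+        )
+
+    events = await event_store.list_messages("t1", limit=500)
+    messages = [dict(evt["content"]) for evt in events]
+
+    assert len(messages) == 6                         # full stream, not just the r2 tail
+    assert messages[0]["content"] == "What changed?"  # original r1 human survives
+    assert [m["id"] for m in messages if m["type"] == "ai"] == [
+        "lc_run--a1", "lc_run--a2", "lc_run--b1",     # AI ids preserved, in order
+    ]
+```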
+
+- [ ] **Step 2: Run the new test**
+
+```bash
+cd backend && PYTHONPATH=. uv run pytest tests/test_thread_state_event_store.py -v
+```
+
+- [ ] **Step 3: Commit with Tasks 1, 3 changes**
+
+Bundle with the Task 1 commit so tests always land alongside the implementation.
+
+---
+
+### Task 5: Standard mode follow-up (documentation only)
+
+Standard mode (`make dev`) hits LangGraph Server directly for `/threads/{id}/history` and does not go through the Gateway router we just patched. The summarize bug is still present there.
+
+**Files:**
+- Modify: this plan (add follow-up section at the bottom, see below) OR create a separate tracking issue
+
+- [ ] **Step 1: Record the gap**
+
+Append to the bottom of this plan (or open a GitHub issue and link it):
+
+> **Follow-up — Standard mode summarize bug**
+> `get_thread_history` in `backend/app/gateway/routers/threads.py` is only hit in Gateway mode. Standard mode proxies `/api/langgraph/*` directly to the LangGraph Server (see `backend/CLAUDE.md` nginx routing and `frontend/CLAUDE.md` `NEXT_PUBLIC_LANGGRAPH_BASE_URL`). The summarize-message-loss symptom is still reproducible there. Options: (a) teach the LangGraph Server checkpointer to branch on an override, (b) move `/history` behind Gateway in Standard mode as well, (c) accept as known limitation for Standard mode. Decide before GA.
diff --git a/docs/superpowers/specs/2026-04-11-runjournal-history-evaluation.md b/docs/superpowers/specs/2026-04-11-runjournal-history-evaluation.md
new file mode 100644
index 000000000..44a466960
--- /dev/null
+++ b/docs/superpowers/specs/2026-04-11-runjournal-history-evaluation.md
@@ -0,0 +1,191 @@
+# Replacing History Messages with RunJournal — Evaluation and Comparison
+
+**Date**: 2026-04-11
+**Branch**: `rayhpeng/fix-persistence-new`
+**Related plan**: [`docs/superpowers/plans/2026-04-10-event-store-history.md`](../plans/2026-04-10-event-store-history.md) (not yet implemented)
+
+---
+
+## 1. Problem and data verification
+
+**Symptom**: after `SummarizationMiddleware` fires, the frontend history can no longer show the real user messages from before the summarization.
+
+**Repro data** (thread `6d30913e-dcd4-41c8-8941-f66c716cf359`):
+
+| Data source | Message at seq=1 | Total messages | Original human preserved? |
+|---|---|---:|---|
+| `run_events` (SQLite) | human `"最新伊美局势"` | 9 (1 human + 7 ai_tool_call + 9 tool_result + 1 ai_message) | ✅ |
+| `/history` response (`docs/resp.json`) | type=human, content=`"Here is a summary of the conversation to date:…"` | varies | ❌ (already replaced by the summary) |
+
+**Root cause**: `get_thread_history` reads from `checkpoint.channel_values["messages"]` (`backend/app/gateway/routers/threads.py:587-589`), and LangGraph's SummarizationMiddleware rewrites that list in place.
+
+---
+
+## 2. Candidate approaches
+
+| Approach | Description | Recommended this round? |
+|---|---|---|
+| **A. Override messages from event_store** (existing plan) | `/history` and `/state` read `RunEventStore.list_messages()` and override `channel_values["messages"]`; all other fields keep their checkpoint source | ✅ Primary approach |
+| B. Patch SummarizationMiddleware | Make summarize append a system message instead of replacing messages in place | ❌ Defeats the token-budget purpose of summarization |
+| C. Dual-read merge (checkpoint + event_store diff) | Merge the two segments on either side of the summarize cut point | ❌ Complex merge logic, no extra benefit |
+| D. Switch to the existing `/api/threads/{id}/messages` endpoint | Frontend consumes the event-store message endpoint that already exists (`thread_runs.py:285-323`) | ⚠️ Cleaner, but needs frontend changes |
+
+---
+
+## 3. Claude self-review vs. Codex independent review
+
+Both sides analyzed the same plan independently. The overlapping findings largely agree, but **Codex found a critical bug I had missed**.
+
+### 3.1 Shared conclusions
+
+| Dimension | Conclusion |
+|---|---|
+| Correctness direction | The event store is append-only and unaffected by summarize; the direction is right |
+| ID backfill | `uuid5(NAMESPACE_URL, f"{thread_id}:{seq}")` is stable and deterministic; safe |
+| Frontend schema | Zero changes |
+| Non-message fields (artifacts/todos/title/thread_data) | Summarize only touches messages; no other fields need overriding |
+| Multi-checkpoint semantics | The frontend `useStream` only fetches `limit: 1` (`frontend/src/core/threads/hooks.ts:203-210`) and does no time travel; latest-only is acceptable but should be spelled out in comments/docs |
+| Scope | Gateway mode only; Standard mode talks to the LangGraph Server directly, so the bug persists on the default deployment path |
+
+### 3.2 Claude's independent observations
+
+1. Data alignment verified: the real-data alignment table at lines 15-28 of the plan document matches this `run_events` export (id distribution over the 9 messages: AI ids come from the LLM as `lc_run--*`; human/tool ids are None).
+2. Concern that the `run_end` / `run_error` / cancel paths might not all flush; Codex actually checked the code on this point and reached a definitive conclusion (see below).
+3. Approach A is a single-file change of roughly 60 lines; low complexity.
+
+### 3.3 Codex's key additions (missed by Claude)
+
+> **Bug #1 — the plan's `limit=1000` is not a full fetch**
+> The semantics of `RunEventStore.list_messages()` are "return the latest `limit` entries" (`base.py:51-65`, `db.py:151-181`). For long conversations with more than 1000 messages, the plan as written would **drop the earliest messages**, reintroducing the "lost messages" bug (just losing a different segment).
+
+> **Bug #2 — the helper mutates the store's dicts in place**
+> The plan's helper writes `id` into `content` in place; `MemoryRunEventStore` returns **live references**, so this pollutes the objects held by the store. Deep-copy, or build new objects with a dict comprehension, instead.
+
+> **Flush paths verified**:
+> `RunJournal` flushes on threshold (`journal.py:360-373`), `run_end` (`91-96`), `run_error` (`97-106`), and the worker `finally` (`worker.py:280-286`); `CancelledError` also goes through `finally`. **Normal end/error/cancel all flush; only a hard kill or process crash loses the buffer.**
+> The value of `flush_threshold` 20 → 5 is therefore **limited to the hard-crash window** and mid-run reload visibility; it is **not a correctness fix**, just optional tuning. The cost is more `put_batch` / SQLite churn, and since `_flush_sync()` (`383-398`) already prevents concurrent flushes, "flush every 5 entries" is best-effort rather than a strict guarantee.
+
+### 3.4 Secondary points Codex raised without blocking
+
+- Approach D (consuming the existing `/api/threads/{id}/messages` endpoint) is cleaner but needs frontend changes.
+- Once approach A lands, `/history` is no longer strictly a "per-checkpoint snapshot" API (for the messages field); this should go into comments and the API docs.
+- The Standard-mode summarize bug should get its own follow-up issue.
+
+---
+
+## 4. Final merge verdict
+
+**Codex**: APPROVE-WITH-CHANGES
+**Claude**: agrees with Codex's verdict
+
+### Required changes before merge (Top 3)
+
+1. **Fix the pagination bug**: a fixed `limit=1000` is not acceptable. Use one of the following (see the sketch after this list):
+   - `count = await event_store.count_messages(thread_id)`, then `list_messages(thread_id, limit=count)`
+   - or loop with cursor pagination (`after_seq`) until exhausted
+2. **Do not mutate store dicts in place**: the helper's id backfill on `content` needs a copy (a shallow `dict(content)` copy suffices, since only the top-level `id` is written)
+3. **Explicit Standard-mode follow-up**: add "Standard-mode follow-up: TODO #xxx" at the end of the plan, or state clearly in the merge PR description that this is a Gateway-only stopgap
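+
+A sketch of the corrected helper core covering items 1 and 2 (the `count_messages` / `list_messages` calls use the signatures quoted in §3.3; the `row["content"]` / `row["seq"]` shape is illustrative):
+
+```python
+from uuid import NAMESPACE_URL, uuid5
+
+
+async def _fetch_event_store_messages(event_store, thread_id: str) -> list[dict]:
+    # Fix 1: size the fetch from a count instead of a fixed limit=1000.
+    count = await event_store.count_messages(thread_id)
+    rows = await event_store.list_messages(thread_id, limit=count)
+
+    messages = []
+    for row in rows:
+        content = row["content"]
+        if not content.get("id"):
+            # Fix 2: copy before backfilling the id. MemoryRunEventStore hands
+            # out live references; a shallow dict() copy is enough because
+            # only the top-level id is written.
+            content = dict(content)
+            content["id"] = str(uuid5(NAMESPACE_URL, f"{thread_id}:{row['seq']}"))
+        messages.append(content)
+    return messages
+```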
+
+### Optional (non-blocking)
+
+4. Downgrade `flush_threshold` 20 → 5 to "optional tuning": it is not part of this fix. If pursued, land it as a standalone commit noting it only helps the hard-crash window.
+5. Add a comment to `get_thread_history` explaining that the `messages` field no longer follows checkpoint-snapshot semantics.
+6. Test coverage: simulate a post-summarize checkpoint plus a real event_store, and verify end to end that `/history` returns the original human messages.
+
+---
+
+## 5. Recommended execution order
+
+1. Revise `docs/superpowers/plans/2026-04-10-event-store-history.md` per §4 of this document (mainly Task 1's helper implementation + pagination)
+2. Execute the revised plan (via `superpowers:executing-plans`)
+3. Open the Standard-mode follow-up issue immediately after merging
+
+## 6. Feedback impact analysis (2026-04-11 addendum)
+
+### 6.1 Data model
+
+The `feedback` table (`persistence/feedback/model.py`):
+
+| Field | Notes |
+|---|---|
+| `feedback_id` PK | - |
+| `run_id` NOT NULL | The run the feedback targets |
+| `thread_id` NOT NULL | - |
+| `user_id` | - |
+| `message_id` nullable | The comment says it outright: `optional RunEventStore event identifier`; already designed around the event store |
+| UNIQUE(thread_id, run_id, user_id) | At most one entry per run per user |
+
+**Conclusion**: feedback is keyed by `run_id`, **not by message uuid**, so the checkpoint message loss caused by summarize **does not affect feedback storage**. The schema is natively compatible with the event store; **no data migration needed**.
+
+### 6.2 The frontend's runId mapping: a hidden bug
+
+Frontend feedback currently rides on two parallel data chains:
+
+| Purpose | Data source | Location |
+|---|---|---|
+| Rendering message bodies | `POST /history` (checkpoint) | `useStream` → `thread.messages` |
+| Getting the `runId` mapping | `GET /api/threads/{id}/messages?limit=200` (**event_store**) | `useThreadFeedback` (`hooks.ts:669-709`) |
+
+The two are aligned by **the ordinal position of AI messages**:
+
+```ts
+// hooks.ts:691-698
+for (const msg of messages) {
+  if (msg.event_type === "ai_message") {
+    runIdByAiIndex.push(msg.run_id); // pushed in AI-message order only
+  }
+}
+// message-list.tsx:70-71
+runId = feedbackData.runIdByAiIndex[aiMessageIndex]
+```
+
+**Bug**: in a summarized thread, the two chains disagree on the count and order of AI messages:
+
+| Data source | AI message sequence in this thread | Count |
+|---|---|---:|
+| `/history` (checkpoint, post-summarize) | seq=19,31,37,45,53 | 5 |
+| `/messages` (event_store, complete) | seq=5,13,19,31,37,45,53 | 7 |
+
+Result: the frontend renders "AI message 0" as seq=19, but `runIdByAiIndex[0]` points at the run of seq=5 (harmless here because it happens to be the same run; **in a thread spanning multiple runs, a thumbs-up lands on the wrong run**).
+
+**This bug is independent of this plan and already exists.** Users just may not have noticed it.
+
+### 6.3 Approach A's impact on feedback
+
+**Negative**: none. Feedback storage is unaffected.
+
+**Positive (unexpected win)**: once `/history` switches to the event store, **the AI message sequences of the two chains align automatically**, fixing the hidden bug in §6.2 as a side effect.
+
+**Preconditions** (as important as any of the Top-3 changes):
+
+- The new helper must use **the same message-fetching logic** as the `/messages` endpoint (same store, same filter); otherwise the two chains can still drift under edge conditions
+- Concretely: **both sides must paginate fully**. The frontend currently hardcodes `/messages?limit=200`, so a thread with more than 200 messages gets truncated; the plan's `limit=1000` has the same kind of cap. Mismatched caps → the two orders diverge → the feedback mapping misfires
+- **Must fix**: `useThreadFeedback`'s `limit=200` has to become paginate-until-complete, or the `/messages` backend must default to returning everything
+
+### 6.4 Impact on the frontend change sequence
+
+The original plan claimed "zero frontend changes"; with feedback in the picture, that should be corrected to:
+
+| Change | Required | Optional |
+|---|---|---|
+| Backend: `/history` reads from event_store | ✅ | - |
+| Backend: helper paginates instead of `limit=1000` | ✅ | - |
+| Frontend: `useThreadFeedback` paginates (or raises its limit) | ✅ | - |
+| `runIdByAiIndex` defensive guard: out-of-range index falls back to `undefined` | - | ✅ already in place |
+| Frontend renders directly from `/messages` (approach D) | - | ✅ cleaner long-term |
+
+### 6.5 New feedback-related additions to the Top 3
+
+Beyond the original Top 3, add:
+
+4. **The frontend `useThreadFeedback` must paginate or fetch everything** (`frontend/src/core/threads/hooks.ts:679`); otherwise it still misaligns with `/history`'s new full-stream behavior
+5. **End-to-end test**: a thread spanning more than one run, with summarize triggered, then feedback on a historical AI message; confirm the feedback lands on the correct `run_id`
+6. **TanStack Query cache coordination**: the `staleTime` / invalidation of the `thread-feedback` and history queries must refresh together when a new run ends; otherwise `runIdByAiIndex` is stale after new messages land, and a thumbs-up hits the previous run
+
+---
+
+## 7. Open questions
+
+- Real-world performance of `RunEventStore.count_messages()` and `list_messages(after_seq=...)` (should be fine on SQLite at the thousands-of-messages scale, but not load-tested)
+- Whether `MemoryRunEventStore` and `DbRunEventStore` agree on pagination semantics; Codex only verified `db.py`, and `memory.py` still needs confirmation (a parity sketch follows this list)
+- Whether to promote `/api/threads/{id}/messages` to the frontend's primary endpoint and keep `/history` as a pure checkpoint API: architecturally cleaner, but more expensive
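+
+A quick way to settle the second question (sketch only: `make_db_store` and `seed_messages` are hypothetical fixtures, and `list_messages` is assumed to take the `base.py` signature with `limit` and `after_seq`):
+
+```python
+import pytest
+
+
+@pytest.mark.asyncio
+async def test_memory_and_db_pagination_agree(tmp_path):
+    mem = MemoryRunEventStore()
+    db = make_db_store(tmp_path)  # hypothetical factory for DbRunEventStore
+    for store in (mem, db):
+        seed_messages(store, "t1", count=25)  # hypothetical seeding helper
+
+    async def page_all(store):
+        # Page with after_seq until exhausted, mirroring the helper's loop.
+        out, after_seq = [], None
+        while True:
+            page = await store.list_messages("t1", limit=10, after_seq=after_seq)
+            if not page:
+                return out
+            out.extend(page)
+            after_seq = page[-1]["seq"]
+
+    # Any difference in seqs or ordering means the two stores disagree on
+    # limit/after_seq semantics.
+    assert [m["seq"] for m in await page_all(mem)] == [
+        m["seq"] for m in await page_all(db)
+    ]
+```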
diff --git a/docs/superpowers/specs/2026-04-11-summarize-marker-design.md b/docs/superpowers/specs/2026-04-11-summarize-marker-design.md
new file mode 100644
index 000000000..79cd748d4
--- /dev/null
+++ b/docs/superpowers/specs/2026-04-11-summarize-marker-design.md
@@ -0,0 +1,203 @@
+# Summarize Marker in History — Design & Verification
+
+**Date**: 2026-04-11
+**Branch**: `rayhpeng/fix-persistence-new`
+**Status**: Design approved, implementation deferred to a follow-up PR
+**Depends on**: [`2026-04-11-runjournal-history-evaluation.md`](./2026-04-11-runjournal-history-evaluation.md) (the event-store-backed history fix this builds on)
+
+---
+
+## 1. Goal
+
+Display a "summarization happened here" marker in the conversation history UI when `SummarizationMiddleware` ran mid-run, so users understand why earlier messages look condensed or missing. The event-store-backed `/history` fix already recovered the original messages; this spec adds a **visible marker** at the seq position where summarization occurred, optionally showing the generated summary text.
+
+## 2. Investigation findings
+
+### 2.1 Today's state: zero middleware records
+
+Full scan of `backend/.deer-flow/data/deerflow.db` `run_events`:
+
+| category | rows |
+|---|---:|
+| trace | 76 |
+| message | 34 |
+| lifecycle | 8 |
+| **middleware** | **0** |
+
+No row has `event_type` containing `summariz` or `middleware`. The middleware category is dead in production.
+
+### 2.2 Why: two dead code paths in `journal.py`
+
+| Location | Status |
+|---|---|
+| `journal.py:343-362` — `on_custom_event("summarization", ...)` writes one trace event + one `category="middleware"` event. | Dead. Only fires when something calls `adispatch_custom_event("summarization", {...})`. The upstream LangChain `SummarizationMiddleware` (`.venv/.../langchain/agents/middleware/summarization.py:272`) **never emits custom events** — its `before_model`/`abefore_model` just mutate messages in place and return `{'messages': new_messages}`. Callback never triggered. |
+| `journal.py:449` — `record_middleware(tag, *, name, hook, action, changes)` helper | Dead. Grep shows zero callers in the harness. Added speculatively, never wired up. |
+
+### 2.3 Concrete evidence of summarize running unlogged
+
+Thread `3d5dea4a-0983-4727-a4e8-41a64428933a`:
+
+- `run_events` seq=1 → original human `"写一份关于deer-flow的详细技术报告"` ✓ (event store is fine)
+- `run_events` seq=43 → `llm_request` trace whose `messages[0]` literal contains `"Here is a summary of the conversation to date:"` — proof that SummarizationMiddleware did inject a summary mid-run
+- Zero rows with `category='middleware'` for this thread → nothing captured for UI to render
+
+## 3. Approaches considered
+
+### A. Subclass `SummarizationMiddleware` and dispatch a custom event
+
+Wrap the upstream class, override `abefore_model`, call `await adispatch_custom_event("summarization", {...})` after super(). Journal's existing `on_custom_event` path captures it.
+
+### B. Frontend-only diff heuristic
+
+Compare `event_store.count_messages()` vs rendered count, infer summarization happened from the gap. **Rejected**: can't pinpoint position in the stream, can't show summary text. Only yields a vague badge.
+
+### C. Hybrid A + frontend inline card rendered at the middleware event's seq position
+
+Same backend as A, plus frontend renders an inline `[N messages condensed]` card at the correct chronological position. **Recommended terminal state**.
+
+## 4. Subagent's wrong claim and its rebuttal
+
+An independent agent flagged approach A as structurally broken because:
+
+> `RunnableCallable(trace=False)` skips `set_config_context`, therefore `var_child_runnable_config` is never set, therefore `adispatch_custom_event` raises `RuntimeError("Unable to dispatch an adhoc event without a parent run id")`.
+
+**This is wrong.** The user's counter-intuition was correct: `trace=False` does not prevent `adispatch_custom_event` from working, as long as the middleware signature explicitly accepts `config: RunnableConfig`. The mechanism:
+
+1. `RunnableCallable.__init__` (`langgraph/_internal/_runnable.py:293-319`) inspects the function signature. If it accepts `config: RunnableConfig`, that parameter is recorded in `self.func_accepts`.
+2. Both `trace=True` and `trace=False` branches of `ainvoke` run the same kwarg-injection loop (`_runnable.py:349-356`): `if kw == "config": kw_value = config`. The `config` passed to `ainvoke` (from Pregel's `task.proc.ainvoke(task.input, config)` at `pregel/_retry.py:138`) is the task config with callbacks already bound.
+3. Inside the middleware, passing that `config` explicitly to `adispatch_custom_event(..., config=config)` means the function doesn't rely on `var_child_runnable_config.get()` at all. The LangChain docstring at `langchain_core/callbacks/manager.py:2574-2579` even says "If using python 3.10 and async, you MUST specify the config parameter" — which is exactly this path.
+
+`trace=False` only changes whether **this runnable layer creates a new child callback scope**. It does not affect whether the outer-layer config (with callbacks including `RunJournal`) is passed down to the function.
+
+## 5. Verification
+
+Ran `/tmp/verify_summarize_event.py` (standalone minimal reproduction):
+
+- Minimal `AgentMiddleware` subclass with `abefore_model(self, state, runtime, config: RunnableConfig)`
+- Calls `await adispatch_custom_event("summarization", {...}, config=config)` inside
+- `create_agent(model=FakeChatModel, middleware=[probe])`
+- `agent.ainvoke({...}, config={"callbacks": [RecordingHandler()]})`
+
+**Result**:
+
+```
+INFO verify: ProbeMiddleware.abefore_model called
+INFO verify: config keys: ['callbacks', 'configurable', 'metadata']
+INFO verify: config.callbacks type: AsyncCallbackManager
+INFO verify: config.metadata: {'langgraph_step': 1, 'langgraph_node': 'probe.before_model', ...}
+INFO verify: on_custom_event fired: name=summarization
+  run_id=019d7d19-1727-7830-aa33-648ecbee4b95
+  data={'summary': 'fake summary', 'replaced_count': 3}
+SUCCESS: approach A is viable (config injection + adispatch work)
+```
+
+All five predictions held:
+
+1. ✅ `config: RunnableConfig` signature triggers auto-injection despite `trace=False`
+2. ✅ `config.callbacks` is an `AsyncCallbackManager` with `parent_run_id` set
+3. ✅ `adispatch_custom_event(..., config=config)` runs without error
+4. ✅ `RecordingHandler.on_custom_event` receives the event
+5. ✅ The received `run_id` is a valid UUID tied to the running graph
+
+**Bonus finding**: `config.metadata` contains `langgraph_step` and `langgraph_node`. These can be included in the middleware event's metadata to help the frontend position the marker on the timeline.
+
+## 6. 
Recommended implementation (approach C) + +### 6.1 Backend + +**New wrapper middleware** in `backend/packages/harness/deerflow/agents/lead_agent/agent.py`: + +```python +from langchain.agents.middleware.summarization import SummarizationMiddleware +from langchain_core.callbacks import adispatch_custom_event +from langchain_core.runnables import RunnableConfig + + +class _TrackingSummarizationMiddleware(SummarizationMiddleware): + """Wraps upstream SummarizationMiddleware to emit a ``summarization`` + custom event on every actual summarization, so RunJournal can persist + a middleware:summarize row to the event store. + + The upstream class does not emit events of its own. Declaring + ``config: RunnableConfig`` in the override lets LangGraph's + ``RunnableCallable`` inject the Pregel task config (with callbacks + and parent_run_id) regardless of ``trace=False`` on the node. + """ + + async def abefore_model(self, state, runtime, config: RunnableConfig): + before_count = len(state.get("messages") or []) + result = await super().abefore_model(state, runtime) + if result is None: + return None + + new_messages = result.get("messages") or [] + replaced_count = max(0, before_count - len(new_messages)) + summary_text = _extract_summary_text(new_messages) + + await adispatch_custom_event( + "summarization", + { + "summary": summary_text, + "replaced_count": replaced_count, + }, + config=config, + ) + return result + + +def _extract_summary_text(messages: list) -> str: + """Pull the summary string out of the HumanMessage the upstream class + injects as ``Here is a summary of the conversation to date:...``.""" + for msg in messages: + if getattr(msg, "type", None) == "human": + content = getattr(msg, "content", "") + text = content if isinstance(content, str) else "" + if text.startswith("Here is a summary of the conversation to date"): + return text + return "" +``` + +Swap the existing `SummarizationMiddleware()` instantiation in `_build_middlewares` for `_TrackingSummarizationMiddleware(...)` with the same args. + +**Journal change**: **zero**. `on_custom_event("summarization", ...)` in `journal.py:343-362` already writes both a trace and a `category="middleware"` row. + +**History helper change**: extend `_get_event_store_messages` in `backend/app/gateway/routers/threads.py` to surface `category="middleware"` rows as pseudo-messages, e.g.: + +```python +# In the per-event loop, after the existing message branch: +if evt.get("category") == "middleware" and evt.get("event_type") == "middleware:summarize": + meta = evt.get("metadata") or {} + messages.append({ + "id": f"summary-marker-{evt['seq']}", + "type": "summary_marker", + "replaced_count": meta.get("replaced_count", 0), + "summary": (raw or {}).get("content", "") if isinstance(raw, dict) else "", + "run_id": evt.get("run_id"), + }) +``` + +The marker uses a sentinel `type` (`summary_marker`) that doesn't collide with any LangChain message type, so downstream consumers that loop over messages can skip or render it explicitly. + +### 6.2 Frontend + +- `core/messages/utils.ts`: extend the message grouping to recognize `type === "summary_marker"` and yield it as its own group (`"assistant:summary-marker"`) +- `components/workspace/messages/message-list.tsx`: add a branch in the grouped render switch that renders a distinctive inline card showing `N messages condensed` and a collapsible panel with the summary text +- No changes to feedback logic: the marker has no `feedback` field so the button naturally doesn't render on it + +## 7. Risks + +1. 
**Synchronous path**. The upstream class has both `before_model` and `abefore_model`. Our wrapper only overrides the async variant. If any deer-flow code path ever uses the sync flow, those summarizations won't be captured. Mitigation: also override `before_model` and use `dispatch_custom_event` (sync variant) with the same pattern.
+2. **`_extract_summary_text` fragility**. It depends on the upstream class prefix `"Here is a summary of the conversation to date"` in the injected `HumanMessage`. Any upstream template change breaks detection. Mitigation: pick the first new `HumanMessage` that wasn't in `state["messages"]` before super() — resilient to template wording changes at the cost of a small diff helper.
+3. **`replaced_count` accuracy under concurrent updates**. If another middleware in the chain also modifies `state["messages"]` before super() returns, the naive `before_count - len(new_messages)` arithmetic is wrong. Mitigation: inspect the `RemoveMessage(id=REMOVE_ALL_MESSAGES)` that upstream emits and count from the original input list directly.
+4. **History helper contract change**. Introducing a non-LangChain-typed entry (`type="summary_marker"`) in the `/history` response could break frontend code that blindly casts entries to `Message`. Mitigation: the frontend change above adds an explicit branch; type-check the frontend end-to-end before merging.
+
+## 8. Out of scope / deferred
+
+- Other middleware types (Title, Guardrail, HITL) do not emit custom events either. If we want markers for those too, repeat the wrapper pattern for each. Not in this design.
+- Retroactive markers for old threads (captured before this patch) are impossible without re-running the graph. Legacy threads will show the event-store-recovered messages without a marker.
+- Standard mode (`make dev`) — agent runs inside LangGraph Server, not the Gateway-embedded runtime. `RunJournal` may not be wired there, so the custom event fires but is captured by no one. Tracked as a separate follow-up.
+
+## 9. Next actions
+
+1. Land the current summarize-message-loss fixes (journal `Command` unwrap + event-store-backed `/history` + inline feedback) — implementation verified, being committed now as three commits on `rayhpeng/fix-persistence-new`
+2. 
Summarize-marker implementation (this spec) → separate follow-up PR based on the above verified design diff --git a/extensions_config.example.json b/extensions_config.example.json index dc0e224ea..118c5d6db 100644 --- a/extensions_config.example.json +++ b/extensions_config.example.json @@ -1,4 +1,7 @@ { + "mcpInterceptors": [ + "my_package.mcp.auth:build_auth_interceptor" + ], "mcpServers": { "filesystem": { "enabled": false, diff --git a/frontend/.env.example b/frontend/.env.example index 96c1431c8..19cce7478 100644 --- a/frontend/.env.example +++ b/frontend/.env.example @@ -14,10 +14,3 @@ # Only set these if you need to connect to backend services directly # NEXT_PUBLIC_BACKEND_BASE_URL="http://localhost:8001" # NEXT_PUBLIC_LANGGRAPH_BASE_URL="http://localhost:2024" - -# LangGraph API base URL -# Default: /api/langgraph (uses langgraph dev server via nginx) -# Set to /api/langgraph-compat to use the experimental Gateway-backed runtime -# Requires: SKIP_LANGGRAPH_SERVER=1 in serve.sh (optional, saves resources) -# NEXT_PUBLIC_LANGGRAPH_BASE_URL=/api/langgraph-compat - diff --git a/frontend/.prettierignore b/frontend/.prettierignore index 7d43e2a67..1eebfc69d 100644 --- a/frontend/.prettierignore +++ b/frontend/.prettierignore @@ -1,2 +1,3 @@ pnpm-lock.yaml .omc/ +src/content/**/*.mdx diff --git a/frontend/next.config.js b/frontend/next.config.js index 0f22635c2..5b20aad5f 100644 --- a/frontend/next.config.js +++ b/frontend/next.config.js @@ -23,10 +23,6 @@ const config = { devIndicators: false, async rewrites() { const rewrites = []; - const langgraphURL = getInternalServiceURL( - "DEER_FLOW_INTERNAL_LANGGRAPH_BASE_URL", - "http://127.0.0.1:2024", - ); const gatewayURL = getInternalServiceURL( "DEER_FLOW_INTERNAL_GATEWAY_BASE_URL", "http://127.0.0.1:8001", @@ -35,11 +31,11 @@ const config = { if (!process.env.NEXT_PUBLIC_LANGGRAPH_BASE_URL) { rewrites.push({ source: "/api/langgraph", - destination: langgraphURL, + destination: `${gatewayURL}/api`, }); rewrites.push({ source: "/api/langgraph/:path*", - destination: `${langgraphURL}/:path*`, + destination: `${gatewayURL}/api/:path*`, }); } @@ -66,8 +62,8 @@ const config = { // their own NEXT_PUBLIC_* env var toggle. // // NOTE: this must come AFTER the /api/langgraph rewrite above so that - // LangGraph routes are matched first when NEXT_PUBLIC_LANGGRAPH_BASE_URL - // is unset. + // LangGraph-compatible routes keep their public prefix while Gateway + // receives its native /api/* paths. 
rewrites.push({ source: "/api/:path*", destination: `${gatewayURL}/api/:path*`, diff --git a/frontend/package.json b/frontend/package.json index 198dba37b..2ce4e2f6d 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -54,7 +54,6 @@ "@xyflow/react": "^12.10.0", "ai": "^6.0.33", "best-effort-json-parser": "^1.2.1", - "better-auth": "^1.3", "canvas-confetti": "^1.9.4", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", @@ -89,7 +88,7 @@ "tokenlens": "^1.3.1", "unist-util-visit": "^5.0.0", "use-stick-to-bottom": "^1.1.1", - "uuid": "^13.0.0", + "uuid": "^14.0.0", "zod": "^3.24.2" }, "devDependencies": { diff --git a/frontend/playwright.config.ts b/frontend/playwright.config.ts index 2673564b6..2ac92822a 100644 --- a/frontend/playwright.config.ts +++ b/frontend/playwright.config.ts @@ -28,6 +28,7 @@ export default defineConfig({ timeout: 120_000, env: { SKIP_ENV_VALIDATION: "1", + DEER_FLOW_AUTH_DISABLED: "1", }, }, }); diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml index 0d5fe8d88..0f6d2d81c 100644 --- a/frontend/pnpm-lock.yaml +++ b/frontend/pnpm-lock.yaml @@ -113,9 +113,6 @@ importers: best-effort-json-parser: specifier: ^1.2.1 version: 1.2.1 - better-auth: - specifier: ^1.3 - version: 1.4.18(next@16.1.7(@opentelemetry/api@1.9.0)(@playwright/test@1.59.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@4.1.4(@opentelemetry/api@1.9.0)(@types/node@20.19.33)(vite@7.3.1(@types/node@20.19.33)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.3)))(vue@3.5.28(typescript@5.9.3)) canvas-confetti: specifier: ^1.9.4 version: 1.9.4 @@ -219,8 +216,8 @@ importers: specifier: ^1.1.1 version: 1.1.3(react@19.2.4) uuid: - specifier: ^13.0.0 - version: 13.0.0 + specifier: ^14.0.0 + version: 14.0.0 zod: specifier: ^3.24.2 version: 3.25.76 @@ -323,27 +320,6 @@ packages: resolution: {integrity: sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==} engines: {node: '>=6.9.0'} - '@better-auth/core@1.4.18': - resolution: {integrity: sha512-q+awYgC7nkLEBdx2sW0iJjkzgSHlIxGnOpsN1r/O1+a4m7osJNHtfK2mKJSL1I+GfNyIlxJF8WvD/NLuYMpmcg==} - peerDependencies: - '@better-auth/utils': 0.3.0 - '@better-fetch/fetch': 1.1.21 - better-call: 1.1.8 - jose: ^6.1.0 - kysely: ^0.28.5 - nanostores: ^1.0.1 - - '@better-auth/telemetry@1.4.18': - resolution: {integrity: sha512-e5rDF8S4j3Um/0LIVATL2in9dL4lfO2fr2v1Wio4qTMRbfxqnUDTa+6SZtwdeJrbc4O+a3c+IyIpjG9Q/6GpfQ==} - peerDependencies: - '@better-auth/core': 1.4.18 - - '@better-auth/utils@0.3.0': - resolution: {integrity: sha512-W+Adw6ZA6mgvnSnhOki270rwJ42t4XzSK6YWGF//BbVXL6SwCLWfyzBc1lN2m/4RM28KubdBKQ4X5VMoLRNPQw==} - - '@better-fetch/fetch@1.1.21': - resolution: {integrity: sha512-/ImESw0sskqlVR94jB+5+Pxjf+xBwDZF/N5+y2/q4EqD7IARUTSpPfIo8uf39SYpCxyOCtbyYpUrZ3F/k0zT4A==} - '@braintree/sanitize-url@7.1.2': resolution: {integrity: sha512-jigsZK+sMF/cuiB7sERuo9V7N9jx+dhmHHnQyDSVdpZwVutaBu7WvNYqMDLSgFgfB30n452TP3vjDAvFC973mA==} @@ -1122,14 +1098,6 @@ packages: cpu: [x64] os: [win32] - '@noble/ciphers@2.1.1': - resolution: {integrity: sha512-bysYuiVfhxNJuldNXlFEitTVdNnYUc+XNJZd7Qm2a5j1vZHgY+fazadNFWFaMK/2vye0JVlxV3gHmC0WDfAOQw==} - engines: {node: '>= 20.19.0'} - - '@noble/hashes@2.0.1': - resolution: {integrity: sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==} - engines: {node: '>= 20.19.0'} - '@nodelib/fs.scandir@2.1.5': resolution: {integrity: 
sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} @@ -1793,141 +1761,141 @@ packages: resolution: {integrity: sha512-FqALmHI8D4o6lk/LRWDnhw95z5eO+eAa6ORjVg09YRR7BkcM6oPHU9uyC0gtQG5vpFLvgpeU4+zEAz2H8APHNw==} engines: {node: '>= 10'} - '@rollup/rollup-android-arm-eabi@4.60.1': - resolution: {integrity: sha512-d6FinEBLdIiK+1uACUttJKfgZREXrF0Qc2SmLII7W2AD8FfiZ9Wjd+rD/iRuf5s5dWrr1GgwXCvPqOuDquOowA==} + '@rollup/rollup-android-arm-eabi@4.60.2': + resolution: {integrity: sha512-dnlp69efPPg6Uaw2dVqzWRfAWRnYVb1XJ8CyyhIbZeaq4CA5/mLeZ1IEt9QqQxmbdvagjLIm2ZL8BxXv5lH4Yw==} cpu: [arm] os: [android] - '@rollup/rollup-android-arm64@4.60.1': - resolution: {integrity: sha512-YjG/EwIDvvYI1YvYbHvDz/BYHtkY4ygUIXHnTdLhG+hKIQFBiosfWiACWortsKPKU/+dUwQQCKQM3qrDe8c9BA==} + '@rollup/rollup-android-arm64@4.60.2': + resolution: {integrity: sha512-OqZTwDRDchGRHHm/hwLOL7uVPB9aUvI0am/eQuWMNyFHf5PSEQmyEeYYheA0EPPKUO/l0uigCp+iaTjoLjVoHg==} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.60.1': - resolution: {integrity: sha512-mjCpF7GmkRtSJwon+Rq1N8+pI+8l7w5g9Z3vWj4T7abguC4Czwi3Yu/pFaLvA3TTeMVjnu3ctigusqWUfjZzvw==} + '@rollup/rollup-darwin-arm64@4.60.2': + resolution: {integrity: sha512-UwRE7CGpvSVEQS8gUMBe1uADWjNnVgP3Iusyda1nSRwNDCsRjnGc7w6El6WLQsXmZTbLZx9cecegumcitNfpmA==} cpu: [arm64] os: [darwin] - '@rollup/rollup-darwin-x64@4.60.1': - resolution: {integrity: sha512-haZ7hJ1JT4e9hqkoT9R/19XW2QKqjfJVv+i5AGg57S+nLk9lQnJ1F/eZloRO3o9Scy9CM3wQ9l+dkXtcBgN5Ew==} + '@rollup/rollup-darwin-x64@4.60.2': + resolution: {integrity: sha512-gjEtURKLCC5VXm1I+2i1u9OhxFsKAQJKTVB8WvDAHF+oZlq0GTVFOlTlO1q3AlCTE/DF32c16ESvfgqR7343/g==} cpu: [x64] os: [darwin] - '@rollup/rollup-freebsd-arm64@4.60.1': - resolution: {integrity: sha512-czw90wpQq3ZsAVBlinZjAYTKduOjTywlG7fEeWKUA7oCmpA8xdTkxZZlwNJKWqILlq0wehoZcJYfBvOyhPTQ6w==} + '@rollup/rollup-freebsd-arm64@4.60.2': + resolution: {integrity: sha512-Bcl6CYDeAgE70cqZaMojOi/eK63h5Me97ZqAQoh77VPjMysA/4ORQBRGo3rRy45x4MzVlU9uZxs8Uwy7ZaKnBw==} cpu: [arm64] os: [freebsd] - '@rollup/rollup-freebsd-x64@4.60.1': - resolution: {integrity: sha512-KVB2rqsxTHuBtfOeySEyzEOB7ltlB/ux38iu2rBQzkjbwRVlkhAGIEDiiYnO2kFOkJp+Z7pUXKyrRRFuFUKt+g==} + '@rollup/rollup-freebsd-x64@4.60.2': + resolution: {integrity: sha512-LU+TPda3mAE2QB0/Hp5VyeKJivpC6+tlOXd1VMoXV/YFMvk/MNk5iXeBfB4MQGRWyOYVJ01625vjkr0Az98OJQ==} cpu: [x64] os: [freebsd] - '@rollup/rollup-linux-arm-gnueabihf@4.60.1': - resolution: {integrity: sha512-L+34Qqil+v5uC0zEubW7uByo78WOCIrBvci69E7sFASRl0X7b/MB6Cqd1lky/CtcSVTydWa2WZwFuWexjS5o6g==} + '@rollup/rollup-linux-arm-gnueabihf@4.60.2': + resolution: {integrity: sha512-2QxQrM+KQ7DAW4o22j+XZ6RKdxjLD7BOWTP0Bv0tmjdyhXSsr2Ul1oJDQqh9Zf5qOwTuTc7Ek83mOFaKnodPjg==} cpu: [arm] os: [linux] libc: [glibc] - '@rollup/rollup-linux-arm-musleabihf@4.60.1': - resolution: {integrity: sha512-n83O8rt4v34hgFzlkb1ycniJh7IR5RCIqt6mz1VRJD6pmhRi0CXdmfnLu9dIUS6buzh60IvACM842Ffb3xd6Gg==} + '@rollup/rollup-linux-arm-musleabihf@4.60.2': + resolution: {integrity: sha512-TbziEu2DVsTEOPif2mKWkMeDMLoYjx95oESa9fkQQK7r/Orta0gnkcDpzwufEcAO2BLBsD7mZkXGFqEdMRRwfw==} cpu: [arm] os: [linux] libc: [musl] - '@rollup/rollup-linux-arm64-gnu@4.60.1': - resolution: {integrity: sha512-Nql7sTeAzhTAja3QXeAI48+/+GjBJ+QmAH13snn0AJSNL50JsDqotyudHyMbO2RbJkskbMbFJfIJKWA6R1LCJQ==} + '@rollup/rollup-linux-arm64-gnu@4.60.2': + resolution: {integrity: sha512-bO/rVDiDUuM2YfuCUwZ1t1cP+/yqjqz+Xf2VtkdppefuOFS2OSeAfgafaHNkFn0t02hEyXngZkxtGqXcXwO8Rg==} cpu: [arm64] os: [linux] libc: [glibc] - 
'@rollup/rollup-linux-arm64-musl@4.60.1': - resolution: {integrity: sha512-+pUymDhd0ys9GcKZPPWlFiZ67sTWV5UU6zOJat02M1+PiuSGDziyRuI/pPue3hoUwm2uGfxdL+trT6Z9rxnlMA==} + '@rollup/rollup-linux-arm64-musl@4.60.2': + resolution: {integrity: sha512-hr26p7e93Rl0Za+JwW7EAnwAvKkehh12BU1Llm9Ykiibg4uIr2rbpxG9WCf56GuvidlTG9KiiQT/TXT1yAWxTA==} cpu: [arm64] os: [linux] libc: [musl] - '@rollup/rollup-linux-loong64-gnu@4.60.1': - resolution: {integrity: sha512-VSvgvQeIcsEvY4bKDHEDWcpW4Yw7BtlKG1GUT4FzBUlEKQK0rWHYBqQt6Fm2taXS+1bXvJT6kICu5ZwqKCnvlQ==} + '@rollup/rollup-linux-loong64-gnu@4.60.2': + resolution: {integrity: sha512-pOjB/uSIyDt+ow3k/RcLvUAOGpysT2phDn7TTUB3n75SlIgZzM6NKAqlErPhoFU+npgY3/n+2HYIQVbF70P9/A==} cpu: [loong64] os: [linux] libc: [glibc] - '@rollup/rollup-linux-loong64-musl@4.60.1': - resolution: {integrity: sha512-4LqhUomJqwe641gsPp6xLfhqWMbQV04KtPp7/dIp0nzPxAkNY1AbwL5W0MQpcalLYk07vaW9Kp1PBhdpZYYcEw==} + '@rollup/rollup-linux-loong64-musl@4.60.2': + resolution: {integrity: sha512-2/w+q8jszv9Ww1c+6uJT3OwqhdmGP2/4T17cu8WuwyUuuaCDDJ2ojdyYwZzCxx0GcsZBhzi3HmH+J5pZNXnd+Q==} cpu: [loong64] os: [linux] libc: [musl] - '@rollup/rollup-linux-ppc64-gnu@4.60.1': - resolution: {integrity: sha512-tLQQ9aPvkBxOc/EUT6j3pyeMD6Hb8QF2BTBnCQWP/uu1lhc9AIrIjKnLYMEroIz/JvtGYgI9dF3AxHZNaEH0rw==} + '@rollup/rollup-linux-ppc64-gnu@4.60.2': + resolution: {integrity: sha512-11+aL5vKheYgczxtPVVRhdptAM2H7fcDR5Gw4/bTcteuZBlH4oP9f5s9zYO9aGZvoGeBpqXI/9TZZihZ609wKw==} cpu: [ppc64] os: [linux] libc: [glibc] - '@rollup/rollup-linux-ppc64-musl@4.60.1': - resolution: {integrity: sha512-RMxFhJwc9fSXP6PqmAz4cbv3kAyvD1etJFjTx4ONqFP9DkTkXsAMU4v3Vyc5BgzC+anz7nS/9tp4obsKfqkDHg==} + '@rollup/rollup-linux-ppc64-musl@4.60.2': + resolution: {integrity: sha512-i16fokAGK46IVZuV8LIIwMdtqhin9hfYkCh8pf8iC3QU3LpwL+1FSFGej+O7l3E/AoknL6Dclh2oTdnRMpTzFQ==} cpu: [ppc64] os: [linux] libc: [musl] - '@rollup/rollup-linux-riscv64-gnu@4.60.1': - resolution: {integrity: sha512-QKgFl+Yc1eEk6MmOBfRHYF6lTxiiiV3/z/BRrbSiW2I7AFTXoBFvdMEyglohPj//2mZS4hDOqeB0H1ACh3sBbg==} + '@rollup/rollup-linux-riscv64-gnu@4.60.2': + resolution: {integrity: sha512-49FkKS6RGQoriDSK/6E2GkAsAuU5kETFCh7pG4yD/ylj9rKhTmO3elsnmBvRD4PgJPds5W2PkhC82aVwmUcJ7A==} cpu: [riscv64] os: [linux] libc: [glibc] - '@rollup/rollup-linux-riscv64-musl@4.60.1': - resolution: {integrity: sha512-RAjXjP/8c6ZtzatZcA1RaQr6O1TRhzC+adn8YZDnChliZHviqIjmvFwHcxi4JKPSDAt6Uhf/7vqcBzQJy0PDJg==} + '@rollup/rollup-linux-riscv64-musl@4.60.2': + resolution: {integrity: sha512-mjYNkHPfGpUR00DuM1ZZIgs64Hpf4bWcz9Z41+4Q+pgDx73UwWdAYyf6EG/lRFldmdHHzgrYyge5akFUW0D3mQ==} cpu: [riscv64] os: [linux] libc: [musl] - '@rollup/rollup-linux-s390x-gnu@4.60.1': - resolution: {integrity: sha512-wcuocpaOlaL1COBYiA89O6yfjlp3RwKDeTIA0hM7OpmhR1Bjo9j31G1uQVpDlTvwxGn2nQs65fBFL5UFd76FcQ==} + '@rollup/rollup-linux-s390x-gnu@4.60.2': + resolution: {integrity: sha512-ALyvJz965BQk8E9Al/JDKKDLH2kfKFLTGMlgkAbbYtZuJt9LU8DW3ZoDMCtQpXAltZxwBHevXz5u+gf0yA0YoA==} cpu: [s390x] os: [linux] libc: [glibc] - '@rollup/rollup-linux-x64-gnu@4.60.1': - resolution: {integrity: sha512-77PpsFQUCOiZR9+LQEFg9GClyfkNXj1MP6wRnzYs0EeWbPcHs02AXu4xuUbM1zhwn3wqaizle3AEYg5aeoohhg==} + '@rollup/rollup-linux-x64-gnu@4.60.2': + resolution: {integrity: sha512-UQjrkIdWrKI626Du8lCQ6MJp/6V1LAo2bOK9OTu4mSn8GGXIkPXk/Vsp4bLHCd9Z9Iz2OTEaokUE90VweJgIYQ==} cpu: [x64] os: [linux] libc: [glibc] - '@rollup/rollup-linux-x64-musl@4.60.1': - resolution: {integrity: sha512-5cIATbk5vynAjqqmyBjlciMJl1+R/CwX9oLk/EyiFXDWd95KpHdrOJT//rnUl4cUcskrd0jCCw3wpZnhIHdD9w==} + 
'@rollup/rollup-linux-x64-musl@4.60.2': + resolution: {integrity: sha512-bTsRGj6VlSdn/XD4CGyzMnzaBs9bsRxy79eTqTCBsA8TMIEky7qg48aPkvJvFe1HyzQ5oMZdg7AnVlWQSKLTnw==} cpu: [x64] os: [linux] libc: [musl] - '@rollup/rollup-openbsd-x64@4.60.1': - resolution: {integrity: sha512-cl0w09WsCi17mcmWqqglez9Gk8isgeWvoUZ3WiJFYSR3zjBQc2J5/ihSjpl+VLjPqjQ/1hJRcqBfLjssREQILw==} + '@rollup/rollup-openbsd-x64@4.60.2': + resolution: {integrity: sha512-6d4Z3534xitaA1FcMWP7mQPq5zGwBmGbhphh2DwaA1aNIXUu3KTOfwrWpbwI4/Gr0uANo7NTtaykFyO2hPuFLg==} cpu: [x64] os: [openbsd] - '@rollup/rollup-openharmony-arm64@4.60.1': - resolution: {integrity: sha512-4Cv23ZrONRbNtbZa37mLSueXUCtN7MXccChtKpUnQNgF010rjrjfHx3QxkS2PI7LqGT5xXyYs1a7LbzAwT0iCA==} + '@rollup/rollup-openharmony-arm64@4.60.2': + resolution: {integrity: sha512-NetAg5iO2uN7eB8zE5qrZ3CSil+7IJt4WDFLcC75Ymywq1VZVD6qJ6EvNLjZ3rEm6gB7XW5JdT60c6MN35Z85Q==} cpu: [arm64] os: [openharmony] - '@rollup/rollup-win32-arm64-msvc@4.60.1': - resolution: {integrity: sha512-i1okWYkA4FJICtr7KpYzFpRTHgy5jdDbZiWfvny21iIKky5YExiDXP+zbXzm3dUcFpkEeYNHgQ5fuG236JPq0g==} + '@rollup/rollup-win32-arm64-msvc@4.60.2': + resolution: {integrity: sha512-NCYhOotpgWZ5kdxCZsv6Iudx0wX8980Q/oW4pNFNihpBKsDbEA1zpkfxJGC0yugsUuyDZ7gL37dbzwhR0VI7pQ==} cpu: [arm64] os: [win32] - '@rollup/rollup-win32-ia32-msvc@4.60.1': - resolution: {integrity: sha512-u09m3CuwLzShA0EYKMNiFgcjjzwqtUMLmuCJLeZWjjOYA3IT2Di09KaxGBTP9xVztWyIWjVdsB2E9goMjZvTQg==} + '@rollup/rollup-win32-ia32-msvc@4.60.2': + resolution: {integrity: sha512-RXsaOqXxfoUBQoOgvmmijVxJnW2IGB0eoMO7F8FAjaj0UTywUO/luSqimWBJn04WNgUkeNhh7fs7pESXajWmkg==} cpu: [ia32] os: [win32] - '@rollup/rollup-win32-x64-gnu@4.60.1': - resolution: {integrity: sha512-k+600V9Zl1CM7eZxJgMyTUzmrmhB/0XZnF4pRypKAlAgxmedUA+1v9R+XOFv56W4SlHEzfeMtzujLJD22Uz5zg==} + '@rollup/rollup-win32-x64-gnu@4.60.2': + resolution: {integrity: sha512-qdAzEULD+/hzObedtmV6iBpdL5TIbKVztGiK7O3/KYSf+HIzU257+MX1EXJcyIiDbMAqmbwaufcYPvyRryeZtA==} cpu: [x64] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.60.1': - resolution: {integrity: sha512-lWMnixq/QzxyhTV6NjQJ4SFo1J6PvOX8vUx5Wb4bBPsEb+8xZ89Bz6kOXpfXj9ak9AHTQVQzlgzBEc1SyM27xQ==} + '@rollup/rollup-win32-x64-msvc@4.60.2': + resolution: {integrity: sha512-Nd/SgG27WoA9e+/TdK74KnHz852TLa94ovOYySo/yMPuTmpckK/jIF2jSwS3g7ELSKXK13/cVdmg1Z/DaCWKxA==} cpu: [x64] os: [win32] @@ -2746,76 +2714,6 @@ packages: best-effort-json-parser@1.2.1: resolution: {integrity: sha512-UICSLibQdzS1f+PBsi3u2YE3SsdXcWicHUg3IMvfuaePS2AYnZJdJeKhGv5OM8/mqJwPt79aDrEJ1oa84tELvw==} - better-auth@1.4.18: - resolution: {integrity: sha512-bnyifLWBPcYVltH3RhS7CM62MoelEqC6Q+GnZwfiDWNfepXoQZBjEvn4urcERC7NTKgKq5zNBM8rvPvRBa6xcg==} - peerDependencies: - '@lynx-js/react': '*' - '@prisma/client': ^5.0.0 || ^6.0.0 || ^7.0.0 - '@sveltejs/kit': ^2.0.0 - '@tanstack/react-start': ^1.0.0 - '@tanstack/solid-start': ^1.0.0 - better-sqlite3: ^12.0.0 - drizzle-kit: '>=0.31.4' - drizzle-orm: '>=0.41.0' - mongodb: ^6.0.0 || ^7.0.0 - mysql2: ^3.0.0 - next: ^14.0.0 || ^15.0.0 || ^16.0.0 - pg: ^8.0.0 - prisma: ^5.0.0 || ^6.0.0 || ^7.0.0 - react: ^18.0.0 || ^19.0.0 - react-dom: ^18.0.0 || ^19.0.0 - solid-js: ^1.0.0 - svelte: ^4.0.0 || ^5.0.0 - vitest: ^2.0.0 || ^3.0.0 || ^4.0.0 - vue: ^3.0.0 - peerDependenciesMeta: - '@lynx-js/react': - optional: true - '@prisma/client': - optional: true - '@sveltejs/kit': - optional: true - '@tanstack/react-start': - optional: true - '@tanstack/solid-start': - optional: true - better-sqlite3: - optional: true - drizzle-kit: - optional: true - drizzle-orm: - optional: true - mongodb: - 
optional: true - mysql2: - optional: true - next: - optional: true - pg: - optional: true - prisma: - optional: true - react: - optional: true - react-dom: - optional: true - solid-js: - optional: true - svelte: - optional: true - vitest: - optional: true - vue: - optional: true - - better-call@1.1.8: - resolution: {integrity: sha512-XMQ2rs6FNXasGNfMjzbyroSwKwYbZ/T3IxruSS6U2MJRsSYh3wYtG3o6H00ZlKZ/C/UPOAD97tqgQJNsxyeTXw==} - peerDependencies: - zod: ^4.0.0 - peerDependenciesMeta: - zod: - optional: true - better-react-mathjax@2.3.0: resolution: {integrity: sha512-K0ceQC+jQmB+NLDogO5HCpqmYf18AU2FxDbLdduYgkHYWZApFggkHE4dIaXCV1NqeoscESYXXo1GSkY6fA295w==} peerDependencies: @@ -3813,8 +3711,8 @@ packages: resolution: {integrity: sha512-Ox1pJVrDCyGHMG9CFg1tmrRUMRPRsAWYc/PinY0XzJU4K7y7vjNoLKIQ7BR5UJMCxNN8EM1MNDmHWA/B3aZUuw==} engines: {node: '>=6'} - hookable@6.1.0: - resolution: {integrity: sha512-ZoKZSJgu8voGK2geJS+6YtYjvIzu9AOM/KZXsBxr83uhLL++e9pEv/dlgwgy3dvHg06kTz6JOh1hk3C8Ceiymw==} + hookable@6.1.1: + resolution: {integrity: sha512-U9LYDy1CwhMCnprUfeAZWZGByVbhd54hwepegYTK7Pi5NvqEj63ifz5z+xukznehT7i6NIZRu89Ay1AZmRsLEQ==} html-url-attributes@3.0.1: resolution: {integrity: sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ==} @@ -4048,9 +3946,6 @@ packages: resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==} hasBin: true - jose@6.1.3: - resolution: {integrity: sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ==} - js-tiktoken@1.0.21: resolution: {integrity: sha512-biOj/6M5qdgx5TKjDnFT1ymSpM5tbd3ylwDtrQvFQSu0Z7bBYko2dF+W/aUkXUPuk6IVpRxk/3Q2sHOzGlS36g==} @@ -4101,10 +3996,6 @@ packages: knitwork@1.3.0: resolution: {integrity: sha512-4LqMNoONzR43B1W0ek0fhXMsDNW/zxa1NdFAVMY+k28pgZLovR4G3PB5MrpTxCy1QaZCqNoiaKPr5w5qZHfSNw==} - kysely@0.28.11: - resolution: {integrity: sha512-zpGIFg0HuoC893rIjYX1BETkVWdDnzTzF5e0kWXJFg5lE0k1/LfNWBejrcnOFu8Q2Rfq/hTDTU7XLUM8QOrpzg==} - engines: {node: '>=20.0.0'} - langium@3.3.1: resolution: {integrity: sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==} engines: {node: '>=16.0.0'} @@ -4243,8 +4134,8 @@ packages: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} hasBin: true - lru-cache@11.3.3: - resolution: {integrity: sha512-JvNw9Y81y33E+BEYPr0U7omo+U9AySnsMsEiXgwT6yqd31VQWTLNQqmT4ou5eqPFUrTfIDFta2wKhB1hyohtAQ==} + lru-cache@11.3.5: + resolution: {integrity: sha512-NxVFwLAnrd9i7KUBxC4DrUhmgjzOs+1Qm50D3oF1/oL+r1NpZ4gA7xvG0/zJ8evR7zIKn4vLf7qTNduWFtCrRw==} engines: {node: 20 || >=22} lucide-react@0.542.0: @@ -4533,10 +4424,6 @@ packages: engines: {node: ^18 || >=20} hasBin: true - nanostores@1.1.0: - resolution: {integrity: sha512-yJBmDJr18xy47dbNVlHcgdPrulSn1nhSE6Ns9vTG+Nx9VPT6iV1MD6aQFp/t52zpf82FhLLTXAXr30NuCnxvwA==} - engines: {node: ^20.0.0 || >=22.0.0} - napi-postinstall@0.3.4: resolution: {integrity: sha512-PHI5f1O0EP5xJ9gQmFGMS6IZcrVvTjpXjz7Na41gTE7eE2hK11lg04CECCYEEjdc17EV4DO+fkGEtt7TpTaTiQ==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} @@ -4834,12 +4721,12 @@ packages: resolution: {integrity: sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==} engines: {node: ^10 || ^12 || >=14} - postcss@8.5.6: - resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} + postcss@8.5.10: + resolution: 
{integrity: sha512-pMMHxBOZKFU6HgAZ4eyGnwXF/EvPGGqUr0MnZ5+99485wwW41kW91A4LOGxSHhgugZmSChL5AlElNdwlNgcnLQ==} engines: {node: ^10 || ^12 || >=14} - postcss@8.5.9: - resolution: {integrity: sha512-7a70Nsot+EMX9fFU3064K/kdHWZqGVY+BADLyXc8Dfv+mTLLVl6JzJpPaCZ2kQL9gIJvKXSLMHhqdRRjwQeFtw==} + postcss@8.5.6: + resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} engines: {node: ^10 || ^12 || >=14} prelude-ls@1.2.1: @@ -5125,14 +5012,11 @@ packages: robust-predicates@3.0.2: resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==} - rollup@4.60.1: - resolution: {integrity: sha512-VmtB2rFU/GroZ4oL8+ZqXgSA38O6GR8KSIvWmEFv63pQ0G6KaBH9s07PO8XTXP4vI+3UJUEypOfjkGfmSBBR0w==} + rollup@4.60.2: + resolution: {integrity: sha512-J9qZyW++QK/09NyN/zeO0dG/1GdGfyp9lV8ajHnRVLfo/uFsbji5mHnDgn/qYdUHyCkM2N+8VyspgZclfAh0eQ==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true - rou3@0.7.12: - resolution: {integrity: sha512-iFE4hLDuloSWcD7mjdCDhx2bKcIsYbtOTpfH5MHHLSKMOUyjqQXTeZVa289uuwEGEKFoE/BAPbhaU4B774nceg==} - roughjs@4.6.6: resolution: {integrity: sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ==} @@ -5185,9 +5069,6 @@ packages: server-only@0.0.1: resolution: {integrity: sha512-qepMx2JxAa5jjfzxG79yPPq+8BuFToHd1hm7kI+Z4zAq1ftQiP7HcxMhDDItrbtwVeLg/cY2JnKnrcFkmiswNA==} - set-cookie-parser@2.7.2: - resolution: {integrity: sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==} - set-function-length@1.2.2: resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==} engines: {node: '>= 0.4'} @@ -5685,6 +5566,10 @@ packages: resolution: {integrity: sha512-XQegIaBTVUjSHliKqcnFqYypAd4S+WCYt5NIeRs6w/UAry7z8Y9j5ZwRRL4kzq9U3sD6v+85er9FvkEaBpji2w==} hasBin: true + uuid@14.0.0: + resolution: {integrity: sha512-Qo+uWgilfSmAhXCMav1uYFynlQO7fMFiMVZsQqZRMIXp0O7rR7qjkj+cPvBHLgBqi960QCoo/PH2/6ZtVqKvrg==} + hasBin: true + vfile-location@5.0.3: resolution: {integrity: sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==} @@ -5948,27 +5833,6 @@ snapshots: '@babel/helper-string-parser': 7.27.1 '@babel/helper-validator-identifier': 7.28.5 - '@better-auth/core@1.4.18(@better-auth/utils@0.3.0)(@better-fetch/fetch@1.1.21)(better-call@1.1.8(zod@3.25.76))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.0)': - dependencies: - '@better-auth/utils': 0.3.0 - '@better-fetch/fetch': 1.1.21 - '@standard-schema/spec': 1.1.0 - better-call: 1.1.8(zod@4.3.6) - jose: 6.1.3 - kysely: 0.28.11 - nanostores: 1.1.0 - zod: 4.3.6 - - '@better-auth/telemetry@1.4.18(@better-auth/core@1.4.18(@better-auth/utils@0.3.0)(@better-fetch/fetch@1.1.21)(better-call@1.1.8(zod@3.25.76))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.0))': - dependencies: - '@better-auth/core': 1.4.18(@better-auth/utils@0.3.0)(@better-fetch/fetch@1.1.21)(better-call@1.1.8(zod@3.25.76))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.0) - '@better-auth/utils': 0.3.0 - '@better-fetch/fetch': 1.1.21 - - '@better-auth/utils@0.3.0': {} - - '@better-fetch/fetch@1.1.21': {} - '@braintree/sanitize-url@7.1.2': {} '@cfworker/json-schema@4.1.1': {} @@ -6817,10 +6681,6 @@ snapshots: '@next/swc-win32-x64-msvc@16.1.7': optional: true - '@noble/ciphers@2.1.1': {} - - '@noble/hashes@2.0.1': {} - '@nodelib/fs.scandir@2.1.5': dependencies: '@nodelib/fs.stat': 2.0.5 @@ -7478,79 +7338,79 @@ snapshots: 
'@resvg/resvg-wasm@2.6.2': {} - '@rollup/rollup-android-arm-eabi@4.60.1': + '@rollup/rollup-android-arm-eabi@4.60.2': optional: true - '@rollup/rollup-android-arm64@4.60.1': + '@rollup/rollup-android-arm64@4.60.2': optional: true - '@rollup/rollup-darwin-arm64@4.60.1': + '@rollup/rollup-darwin-arm64@4.60.2': optional: true - '@rollup/rollup-darwin-x64@4.60.1': + '@rollup/rollup-darwin-x64@4.60.2': optional: true - '@rollup/rollup-freebsd-arm64@4.60.1': + '@rollup/rollup-freebsd-arm64@4.60.2': optional: true - '@rollup/rollup-freebsd-x64@4.60.1': + '@rollup/rollup-freebsd-x64@4.60.2': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.60.1': + '@rollup/rollup-linux-arm-gnueabihf@4.60.2': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.60.1': + '@rollup/rollup-linux-arm-musleabihf@4.60.2': optional: true - '@rollup/rollup-linux-arm64-gnu@4.60.1': + '@rollup/rollup-linux-arm64-gnu@4.60.2': optional: true - '@rollup/rollup-linux-arm64-musl@4.60.1': + '@rollup/rollup-linux-arm64-musl@4.60.2': optional: true - '@rollup/rollup-linux-loong64-gnu@4.60.1': + '@rollup/rollup-linux-loong64-gnu@4.60.2': optional: true - '@rollup/rollup-linux-loong64-musl@4.60.1': + '@rollup/rollup-linux-loong64-musl@4.60.2': optional: true - '@rollup/rollup-linux-ppc64-gnu@4.60.1': + '@rollup/rollup-linux-ppc64-gnu@4.60.2': optional: true - '@rollup/rollup-linux-ppc64-musl@4.60.1': + '@rollup/rollup-linux-ppc64-musl@4.60.2': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.60.1': + '@rollup/rollup-linux-riscv64-gnu@4.60.2': optional: true - '@rollup/rollup-linux-riscv64-musl@4.60.1': + '@rollup/rollup-linux-riscv64-musl@4.60.2': optional: true - '@rollup/rollup-linux-s390x-gnu@4.60.1': + '@rollup/rollup-linux-s390x-gnu@4.60.2': optional: true - '@rollup/rollup-linux-x64-gnu@4.60.1': + '@rollup/rollup-linux-x64-gnu@4.60.2': optional: true - '@rollup/rollup-linux-x64-musl@4.60.1': + '@rollup/rollup-linux-x64-musl@4.60.2': optional: true - '@rollup/rollup-openbsd-x64@4.60.1': + '@rollup/rollup-openbsd-x64@4.60.2': optional: true - '@rollup/rollup-openharmony-arm64@4.60.1': + '@rollup/rollup-openharmony-arm64@4.60.2': optional: true - '@rollup/rollup-win32-arm64-msvc@4.60.1': + '@rollup/rollup-win32-arm64-msvc@4.60.2': optional: true - '@rollup/rollup-win32-ia32-msvc@4.60.1': + '@rollup/rollup-win32-ia32-msvc@4.60.2': optional: true - '@rollup/rollup-win32-x64-gnu@4.60.1': + '@rollup/rollup-win32-x64-gnu@4.60.2': optional: true - '@rollup/rollup-win32-x64-msvc@4.60.1': + '@rollup/rollup-win32-x64-msvc@4.60.2': optional: true '@rtsao/scc@1.1.0': {} @@ -8093,7 +7953,7 @@ snapshots: '@unhead/vue@2.1.4(vue@3.5.28(typescript@5.9.3))': dependencies: - hookable: 6.1.0 + hookable: 6.1.1 unhead: 2.1.4 vue: 3.5.28(typescript@5.9.3) @@ -8244,7 +8104,7 @@ snapshots: '@vue/shared': 3.5.28 estree-walker: 2.0.2 magic-string: 0.30.21 - postcss: 8.5.9 + postcss: 8.5.10 source-map-js: 1.2.1 '@vue/compiler-ssr@3.5.28': @@ -8442,36 +8302,6 @@ snapshots: best-effort-json-parser@1.2.1: {} - better-auth@1.4.18(next@16.1.7(@opentelemetry/api@1.9.0)(@playwright/test@1.59.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@4.1.4(@opentelemetry/api@1.9.0)(@types/node@20.19.33)(vite@7.3.1(@types/node@20.19.33)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.3)))(vue@3.5.28(typescript@5.9.3)): - dependencies: - '@better-auth/core': 1.4.18(@better-auth/utils@0.3.0)(@better-fetch/fetch@1.1.21)(better-call@1.1.8(zod@3.25.76))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.0) - 
'@better-auth/telemetry': 1.4.18(@better-auth/core@1.4.18(@better-auth/utils@0.3.0)(@better-fetch/fetch@1.1.21)(better-call@1.1.8(zod@3.25.76))(jose@6.1.3)(kysely@0.28.11)(nanostores@1.1.0)) - '@better-auth/utils': 0.3.0 - '@better-fetch/fetch': 1.1.21 - '@noble/ciphers': 2.1.1 - '@noble/hashes': 2.0.1 - better-call: 1.1.8(zod@4.3.6) - defu: 6.1.4 - jose: 6.1.3 - kysely: 0.28.11 - nanostores: 1.1.0 - zod: 4.3.6 - optionalDependencies: - next: 16.1.7(@opentelemetry/api@1.9.0)(@playwright/test@1.59.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) - react: 19.2.4 - react-dom: 19.2.4(react@19.2.4) - vitest: 4.1.4(@opentelemetry/api@1.9.0)(@types/node@20.19.33)(vite@7.3.1(@types/node@20.19.33)(jiti@2.6.1)(lightningcss@1.30.2)(yaml@2.8.3)) - vue: 3.5.28(typescript@5.9.3) - - better-call@1.1.8(zod@4.3.6): - dependencies: - '@better-auth/utils': 0.3.0 - '@better-fetch/fetch': 1.1.21 - rou3: 0.7.12 - set-cookie-parser: 2.7.2 - optionalDependencies: - zod: 4.3.6 - better-react-mathjax@2.3.0(react@19.2.4): dependencies: mathjax-full: 3.2.2 @@ -9789,7 +9619,7 @@ snapshots: hex-rgb@4.3.0: {} - hookable@6.1.0: {} + hookable@6.1.1: {} html-url-attributes@3.0.1: {} @@ -9998,8 +9828,6 @@ snapshots: jiti@2.6.1: {} - jose@6.1.3: {} - js-tiktoken@1.0.21: dependencies: base64-js: 1.5.1 @@ -10045,8 +9873,6 @@ snapshots: knitwork@1.3.0: {} - kysely@0.28.11: {} - langium@3.3.1: dependencies: chevrotain: 11.0.3 @@ -10158,7 +9984,7 @@ snapshots: dependencies: js-tokens: 4.0.0 - lru-cache@11.3.3: {} + lru-cache@11.3.5: {} lucide-react@0.542.0(react@19.2.4): dependencies: @@ -10741,8 +10567,6 @@ snapshots: nanoid@5.1.6: {} - nanostores@1.1.0: {} - napi-postinstall@0.3.4: {} natural-compare@1.4.0: {} @@ -11152,13 +10976,13 @@ snapshots: picocolors: 1.1.1 source-map-js: 1.2.1 - postcss@8.5.6: + postcss@8.5.10: dependencies: nanoid: 3.3.11 picocolors: 1.1.1 source-map-js: 1.2.1 - postcss@8.5.9: + postcss@8.5.6: dependencies: nanoid: 3.3.11 picocolors: 1.1.1 @@ -11493,39 +11317,37 @@ snapshots: robust-predicates@3.0.2: {} - rollup@4.60.1: + rollup@4.60.2: dependencies: '@types/estree': 1.0.8 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.60.1 - '@rollup/rollup-android-arm64': 4.60.1 - '@rollup/rollup-darwin-arm64': 4.60.1 - '@rollup/rollup-darwin-x64': 4.60.1 - '@rollup/rollup-freebsd-arm64': 4.60.1 - '@rollup/rollup-freebsd-x64': 4.60.1 - '@rollup/rollup-linux-arm-gnueabihf': 4.60.1 - '@rollup/rollup-linux-arm-musleabihf': 4.60.1 - '@rollup/rollup-linux-arm64-gnu': 4.60.1 - '@rollup/rollup-linux-arm64-musl': 4.60.1 - '@rollup/rollup-linux-loong64-gnu': 4.60.1 - '@rollup/rollup-linux-loong64-musl': 4.60.1 - '@rollup/rollup-linux-ppc64-gnu': 4.60.1 - '@rollup/rollup-linux-ppc64-musl': 4.60.1 - '@rollup/rollup-linux-riscv64-gnu': 4.60.1 - '@rollup/rollup-linux-riscv64-musl': 4.60.1 - '@rollup/rollup-linux-s390x-gnu': 4.60.1 - '@rollup/rollup-linux-x64-gnu': 4.60.1 - '@rollup/rollup-linux-x64-musl': 4.60.1 - '@rollup/rollup-openbsd-x64': 4.60.1 - '@rollup/rollup-openharmony-arm64': 4.60.1 - '@rollup/rollup-win32-arm64-msvc': 4.60.1 - '@rollup/rollup-win32-ia32-msvc': 4.60.1 - '@rollup/rollup-win32-x64-gnu': 4.60.1 - '@rollup/rollup-win32-x64-msvc': 4.60.1 + '@rollup/rollup-android-arm-eabi': 4.60.2 + '@rollup/rollup-android-arm64': 4.60.2 + '@rollup/rollup-darwin-arm64': 4.60.2 + '@rollup/rollup-darwin-x64': 4.60.2 + '@rollup/rollup-freebsd-arm64': 4.60.2 + '@rollup/rollup-freebsd-x64': 4.60.2 + '@rollup/rollup-linux-arm-gnueabihf': 4.60.2 + '@rollup/rollup-linux-arm-musleabihf': 4.60.2 + 
'@rollup/rollup-linux-arm64-gnu': 4.60.2 + '@rollup/rollup-linux-arm64-musl': 4.60.2 + '@rollup/rollup-linux-loong64-gnu': 4.60.2 + '@rollup/rollup-linux-loong64-musl': 4.60.2 + '@rollup/rollup-linux-ppc64-gnu': 4.60.2 + '@rollup/rollup-linux-ppc64-musl': 4.60.2 + '@rollup/rollup-linux-riscv64-gnu': 4.60.2 + '@rollup/rollup-linux-riscv64-musl': 4.60.2 + '@rollup/rollup-linux-s390x-gnu': 4.60.2 + '@rollup/rollup-linux-x64-gnu': 4.60.2 + '@rollup/rollup-linux-x64-musl': 4.60.2 + '@rollup/rollup-openbsd-x64': 4.60.2 + '@rollup/rollup-openharmony-arm64': 4.60.2 + '@rollup/rollup-win32-arm64-msvc': 4.60.2 + '@rollup/rollup-win32-ia32-msvc': 4.60.2 + '@rollup/rollup-win32-x64-gnu': 4.60.2 + '@rollup/rollup-win32-x64-msvc': 4.60.2 fsevents: 2.3.3 - rou3@0.7.12: {} - roughjs@4.6.6: dependencies: hachure-fill: 0.5.2 @@ -11592,8 +11414,6 @@ snapshots: server-only@0.0.1: {} - set-cookie-parser@2.7.2: {} - set-function-length@1.2.2: dependencies: define-data-property: 1.1.4 @@ -12011,7 +11831,7 @@ snapshots: unhead@2.1.4: dependencies: - hookable: 6.1.0 + hookable: 6.1.1 unicode-trie@2.0.0: dependencies: @@ -12119,7 +11939,7 @@ snapshots: chokidar: 5.0.0 destr: 2.0.5 h3: 1.15.11 - lru-cache: 11.3.3 + lru-cache: 11.3.5 node-fetch-native: 1.6.7 ofetch: 1.5.1 ufo: 1.6.3 @@ -12174,6 +11994,8 @@ snapshots: uuid@13.0.0: {} + uuid@14.0.0: {} + vfile-location@5.0.3: dependencies: '@types/unist': 3.0.3 @@ -12194,8 +12016,8 @@ snapshots: esbuild: 0.27.7 fdir: 6.5.0(picomatch@4.0.4) picomatch: 4.0.4 - postcss: 8.5.9 - rollup: 4.60.1 + postcss: 8.5.10 + rollup: 4.60.2 tinyglobby: 0.2.16 optionalDependencies: '@types/node': 20.19.33 diff --git a/frontend/src/app/(auth)/layout.tsx b/frontend/src/app/(auth)/layout.tsx new file mode 100644 index 000000000..0b35d4ac1 --- /dev/null +++ b/frontend/src/app/(auth)/layout.tsx @@ -0,0 +1,46 @@ +import Link from "next/link"; +import { redirect } from "next/navigation"; +import { type ReactNode } from "react"; + +import { AuthProvider } from "@/core/auth/AuthProvider"; +import { getServerSideUser } from "@/core/auth/server"; +import { assertNever } from "@/core/auth/types"; + +export const dynamic = "force-dynamic"; + +export default async function AuthLayout({ + children, +}: { + children: ReactNode; +}) { + const result = await getServerSideUser(); + + switch (result.tag) { + case "authenticated": + redirect("/workspace"); + case "needs_setup": + // Allow access to setup page + return {children}; + case "system_setup_required": + case "unauthenticated": + return {children}; + case "gateway_unavailable": + return ( +
+        <div>
+          <p>Service temporarily unavailable.</p>
+          <Link href="/login">Retry</Link>
+        </div>
+ ); + case "config_error": + throw new Error(result.message); + default: + assertNever(result); + } +} diff --git a/frontend/src/app/(auth)/login/page.tsx b/frontend/src/app/(auth)/login/page.tsx new file mode 100644 index 000000000..82fcf8b90 --- /dev/null +++ b/frontend/src/app/(auth)/login/page.tsx @@ -0,0 +1,213 @@ +"use client"; + +import Link from "next/link"; +import { useRouter, useSearchParams } from "next/navigation"; +import { useTheme } from "next-themes"; +import { useEffect, useState } from "react"; + +import { Button } from "@/components/ui/button"; +import { FlickeringGrid } from "@/components/ui/flickering-grid"; +import { Input } from "@/components/ui/input"; +import { useAuth } from "@/core/auth/AuthProvider"; +import { parseAuthError } from "@/core/auth/types"; + +/** + * Validate next parameter + * Prevent open redirect attacks + * Per RFC-001: Only allow relative paths starting with / + */ +function validateNextParam(next: string | null): string | null { + if (!next) { + return null; + } + + // Need start with / (relative path) + if (!next.startsWith("/")) { + return null; + } + + // Disallow protocol-relative URLs + if ( + next.startsWith("//") || + next.startsWith("http://") || + next.startsWith("https://") + ) { + return null; + } + + // Disallow URLs with different protocols (e.g., javascript:, data:, etc) + if (next.includes(":") && !next.startsWith("/")) { + return null; + } + + // Valid relative path + return next; +} + +export default function LoginPage() { + const router = useRouter(); + const searchParams = useSearchParams(); + const { isAuthenticated } = useAuth(); + const { theme, resolvedTheme } = useTheme(); + + const [email, setEmail] = useState(""); + const [password, setPassword] = useState(""); + const [isLogin, setIsLogin] = useState(true); + const [error, setError] = useState(""); + const [loading, setLoading] = useState(false); + + // Get next parameter for validated redirect + const nextParam = searchParams.get("next"); + const redirectPath = validateNextParam(nextParam) ?? "/workspace"; + + // Redirect if already authenticated (client-side, post-login) + useEffect(() => { + if (isAuthenticated) { + router.push(redirectPath); + } + }, [isAuthenticated, redirectPath, router]); + + // Redirect to setup if the system has no users yet + useEffect(() => { + let cancelled = false; + + void fetch("/api/v1/auth/setup-status") + .then((r) => r.json()) + .then((data: { needs_setup?: boolean }) => { + if (!cancelled && data.needs_setup) { + router.push("/setup"); + } + }) + .catch(() => { + // Ignore errors; user stays on login page + }); + + return () => { + cancelled = true; + }; + }, [router]); + + const handleSubmit = async (e: React.FormEvent) => { + e.preventDefault(); + setError(""); + setLoading(true); + + try { + const endpoint = isLogin + ? "/api/v1/auth/login/local" + : "/api/v1/auth/register"; + const body = isLogin + ? `username=${encodeURIComponent(email)}&password=${encodeURIComponent(password)}` + : JSON.stringify({ email, password }); + + const headers: HeadersInit = isLogin + ? 
{ "Content-Type": "application/x-www-form-urlencoded" } + : { "Content-Type": "application/json" }; + + const res = await fetch(endpoint, { + method: "POST", + headers, + body, + credentials: "include", // Important: include HttpOnly cookie + }); + + if (!res.ok) { + const data = await res.json(); + const authError = parseAuthError(data); + setError(authError.message); + return; + } + + // Both login and register set a cookie — redirect to workspace + router.push(redirectPath); + } catch { + setError("Network error. Please try again."); + } finally { + setLoading(false); + } + }; + + const actualTheme = theme === "system" ? resolvedTheme : theme; + + return ( +
+
+  const handleSubmit = async (e: React.FormEvent) => {
+    e.preventDefault();
+    setError("");
+    setLoading(true);
+
+    try {
+      const endpoint = isLogin
+        ? "/api/v1/auth/login/local"
+        : "/api/v1/auth/register";
+      // Login is an OAuth2-style form post; register takes a JSON body
+      const body = isLogin
+        ? `username=${encodeURIComponent(email)}&password=${encodeURIComponent(password)}`
+        : JSON.stringify({ email, password });
+
+      const headers: HeadersInit = isLogin
+        ? { "Content-Type": "application/x-www-form-urlencoded" }
+        : { "Content-Type": "application/json" };
+
+      const res = await fetch(endpoint, {
+        method: "POST",
+        headers,
+        body,
+        credentials: "include", // Important: include the HttpOnly cookie
+      });
+
+      if (!res.ok) {
+        const data = await res.json();
+        const authError = parseAuthError(data);
+        setError(authError.message);
+        return;
+      }
+
+      // Both login and register set a session cookie; redirect onward
+      router.push(redirectPath);
+    } catch {
+      setError("Network error. Please try again.");
+    } finally {
+      setLoading(false);
+    }
+  };
+
+  const actualTheme = theme === "system" ? resolvedTheme : theme;
+
+  // NOTE: styling classes, control labels, and FlickeringGrid props in the
+  // JSX below are representative
+  return (
+    <div className="relative flex min-h-screen items-center justify-center">
+      <FlickeringGrid
+        className="absolute inset-0 z-0"
+        color={actualTheme === "dark" ? "#ffffff" : "#000000"}
+      />
+      <div className="relative z-10 w-full max-w-sm rounded-lg border bg-background p-8 shadow-lg">
+        <div className="mb-6 text-center">
+          <h1 className="text-2xl font-bold">DeerFlow</h1>
+          <p className="text-sm text-muted-foreground">
+            {isLogin ? "Sign in to your account" : "Create a new account"}
+          </p>
+        </div>
+        <form onSubmit={handleSubmit} className="space-y-4">
+          <div>
+            <Input
+              type="email"
+              value={email}
+              onChange={(e) => setEmail(e.target.value)}
+              placeholder="you@example.com"
+              required
+            />
+          </div>
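+          {/* minLength below: existing accounts may sign in with 6-char
+              passwords, while registration requires at least 8 */}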
+          <div>
+            <Input
+              type="password"
+              value={password}
+              onChange={(e) => setPassword(e.target.value)}
+              placeholder="•••••••"
+              required
+              minLength={isLogin ? 6 : 8}
+            />
+          </div>
+          {error && (
+            <p className="text-sm text-destructive">{error}</p>
+          )}
+          <Button type="submit" className="w-full" disabled={loading}>
+            {isLogin ? "Sign in" : "Sign up"}
+          </Button>
+        </form>
+        <Button
+          variant="link"
+          className="w-full"
+          onClick={() => setIsLogin(!isLogin)}
+        >
+          {isLogin ? "Create a new account" : "Sign in instead"}
+        </Button>
+        <Link
+          href="/"
+          className="block text-center text-sm text-muted-foreground"
+        >
+          ← Back to home
+        </Link>
+      </div>
+    </div>
+  );
+}
diff --git a/frontend/src/app/(auth)/setup/page.tsx b/frontend/src/app/(auth)/setup/page.tsx
new file mode 100644
index 000000000..4f1d21eae
--- /dev/null
+++ b/frontend/src/app/(auth)/setup/page.tsx
@@ -0,0 +1,287 @@
+"use client";
+
+import { useRouter } from "next/navigation";
+import { useTheme } from "next-themes";
+import { useEffect, useState } from "react";
+
+import { Button } from "@/components/ui/button";
+import { FlickeringGrid } from "@/components/ui/flickering-grid";
+import { Input } from "@/components/ui/input";
+import { getCsrfHeaders } from "@/core/api/fetcher";
+import { useAuth } from "@/core/auth/AuthProvider";
+import { parseAuthError } from "@/core/auth/types";
+
+type SetupMode = "loading" | "init_admin" | "change_password";
+
+export default function SetupPage() {
+  const router = useRouter();
+  const { user, isAuthenticated } = useAuth();
+  const { theme, resolvedTheme } = useTheme();
+  const [mode, setMode] = useState<SetupMode>("loading");
+
+  // --- Shared state ---
+  const [email, setEmail] = useState("");
+  const [newPassword, setNewPassword] = useState("");
+  const [confirmPassword, setConfirmPassword] = useState("");
+  const [error, setError] = useState("");
+  const [loading, setLoading] = useState(false);
+
+  // --- Change-password mode only ---
+  const [currentPassword, setCurrentPassword] = useState("");
+
+  useEffect(() => {
+    let cancelled = false;
+
+    if (isAuthenticated && user?.needs_setup) {
+      setMode("change_password");
+    } else if (!isAuthenticated) {
+      // Check whether the system has any users yet
+      void fetch("/api/v1/auth/setup-status")
+        .then((r) => r.json())
+        .then((data: { needs_setup?: boolean }) => {
+          if (cancelled) return;
+          if (data.needs_setup) {
+            setMode("init_admin");
+          } else {
+            // System is already set up and the user is not logged in; go to login
+            router.push("/login");
+          }
+        })
+        .catch(() => {
+          if (!cancelled) router.push("/login");
+        });
+    } else {
+      // Authenticated and needs_setup is false; setup is already complete
+      router.push("/workspace");
+    }
+
+    return () => {
+      cancelled = true;
+    };
+  }, [isAuthenticated, user, router]);
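+
+  // The effect above resolves a three-way state machine:
+  //   not authenticated + no users yet        -> "init_admin" (first run)
+  //   authenticated     + user.needs_setup    -> "change_password"
+  //   anything else                           -> redirect (/login or /workspace)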
+
+  // ── Init-admin handler ─────────────────────────────────────────────
+  const handleInitAdmin = async (e: React.FormEvent) => {
+    e.preventDefault();
+    setError("");
+
+    if (newPassword !== confirmPassword) {
+      setError("Passwords do not match");
+      return;
+    }
+
+    setLoading(true);
+    try {
+      const res = await fetch("/api/v1/auth/initialize", {
+        method: "POST",
+        headers: { "Content-Type": "application/json" },
+        credentials: "include",
+        body: JSON.stringify({
+          email,
+          password: newPassword,
+        }),
+      });
+
+      if (!res.ok) {
+        const data = await res.json();
+        const authError = parseAuthError(data);
+        setError(authError.message);
+        return;
+      }
+
+      router.push("/workspace");
+    } catch {
+      setError("Network error. Please try again.");
+    } finally {
+      setLoading(false);
+    }
+  };
+
+  // ── Change-password handler ──────────────────────────────────────────
+  const handleChangePassword = async (e: React.FormEvent) => {
+    e.preventDefault();
+    setError("");
+
+    if (newPassword !== confirmPassword) {
+      setError("Passwords do not match");
+      return;
+    }
+    if (newPassword.length < 8) {
+      setError("Password must be at least 8 characters");
+      return;
+    }
+
+    setLoading(true);
+    try {
+      const res = await fetch("/api/v1/auth/change-password", {
+        method: "POST",
+        headers: {
+          "Content-Type": "application/json",
+          ...getCsrfHeaders(),
+        },
+        credentials: "include",
+        body: JSON.stringify({
+          current_password: currentPassword,
+          new_password: newPassword,
+          new_email: email || undefined,
+        }),
+      });
+
+      if (!res.ok) {
+        const data = await res.json();
+        const authError = parseAuthError(data);
+        setError(authError.message);
+        return;
+      }
+
+      router.push("/workspace");
+    } catch {
+      setError("Network error. Please try again.");
+    } finally {
+      setLoading(false);
+    }
+  };
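+
+  // getCsrfHeaders() attaches the CSRF token to this state-changing request.
+  // /auth/initialize above omits it, presumably because it runs before any
+  // session (and thus any CSRF token) exists.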
+
+  const actualTheme = theme === "system" ? resolvedTheme : theme;
+
+  // NOTE: styling classes, labels, and button text in the JSX below are
+  // representative
+  if (mode === "loading") {
+    return (
+      <div className="flex min-h-screen items-center justify-center">
+        <p className="text-muted-foreground">Loading…</p>
+      </div>
+    );
+  }
+
+  // ── Admin initialization form ──────────────────────────────────────
+  if (mode === "init_admin") {
+    return (
+      <div className="relative flex min-h-screen items-center justify-center">
+        <FlickeringGrid
+          className="absolute inset-0 z-0"
+          color={actualTheme === "dark" ? "#ffffff" : "#000000"}
+        />
+        <div className="relative z-10 w-full max-w-sm rounded-lg border bg-background p-8 shadow-lg">
+          <div className="mb-6 text-center">
+            <h1 className="text-2xl font-bold">DeerFlow</h1>
+            <h2 className="text-lg font-semibold">Create admin account</h2>
+            <p className="text-sm text-muted-foreground">
+              Set up the administrator account to get started.
+            </p>
+          </div>
+          <form onSubmit={handleInitAdmin} className="space-y-4">
+            <div>
+              <Input
+                type="email"
+                value={email}
+                onChange={(e) => setEmail(e.target.value)}
+                placeholder="Admin email"
+                required
+              />
+            </div>
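+            {/* This email becomes the admin sign-in; it can be updated later
+                via the change-password flow (new_email) on this page */}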
+            <div>
+              <Input
+                type="password"
+                value={newPassword}
+                onChange={(e) => setNewPassword(e.target.value)}
+                placeholder="New password (min. 8 characters)"
+                required
+                minLength={8}
+              />
+            </div>
+            <div>
+              <Input
+                type="password"
+                value={confirmPassword}
+                onChange={(e) => setConfirmPassword(e.target.value)}
+                placeholder="Confirm password"
+                required
+                minLength={8}
+              />
+            </div>
+            {error && (
+              <p className="text-sm text-destructive">{error}</p>
+            )}
+            <Button type="submit" className="w-full" disabled={loading}>
+              Create account
+            </Button>
+          </form>
+        </div>
+      </div>
+    );
+  }
+
+  // ── Change-password form (needs_setup after login) ───────────────────
+  return (
+    <div className="relative flex min-h-screen items-center justify-center">
+      <FlickeringGrid
+        className="absolute inset-0 z-0"
+        color={actualTheme === "dark" ? "#ffffff" : "#000000"}
+      />
+      <div className="relative z-10 w-full max-w-sm rounded-lg border bg-background p-8 shadow-lg">
+        <div className="mb-6 text-center">
+          <h1 className="text-2xl font-bold">DeerFlow</h1>
+          <h2 className="text-lg font-semibold">
+            Complete admin account setup
+          </h2>
+          <p className="text-sm text-muted-foreground">
+            Set your real email and a new password.
+          </p>
+        </div>
+        <form onSubmit={handleChangePassword} className="space-y-4">
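+          {/* The handler sends new_email: email || undefined, so an empty
+              value falls back to the current email; since the input below is
+              required, that fallback is mostly a defensive default */}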
+          <Input
+            type="email"
+            value={email}
+            onChange={(e) => setEmail(e.target.value)}
+            placeholder="Email"
+            required
+          />
+          <Input
+            type="password"
+            value={currentPassword}
+            onChange={(e) => setCurrentPassword(e.target.value)}
+            placeholder="Current password"
+            required
+          />
+          <Input
+            type="password"
+            value={newPassword}
+            onChange={(e) => setNewPassword(e.target.value)}
+            placeholder="New password (min. 8 characters)"
+            required
+            minLength={8}
+          />
+          <Input
+            type="password"
+            value={confirmPassword}
+            onChange={(e) => setConfirmPassword(e.target.value)}
+            placeholder="Confirm new password"
+            required
+            minLength={8}
+          />
+          {error && (
+            <p className="text-sm text-destructive">{error}</p>
+          )}
+          <Button type="submit" className="w-full" disabled={loading}>
+            Save and continue
+          </Button>
+        </form>
+      </div>
+    </div>
+  );
+}
diff --git a/frontend/src/app/[lang]/docs/layout.tsx b/frontend/src/app/[lang]/docs/layout.tsx
index f63d6ae7b..895da1da8 100644
--- a/frontend/src/app/[lang]/docs/layout.tsx
+++ b/frontend/src/app/[lang]/docs/layout.tsx
@@ -34,14 +34,14 @@ export default async function DocLayout({ children, params }) {
       }
       pageMap={pageMap}
       docsRepositoryBase="https://github.com/bytedance/deerflow/tree/main/frontend/src/content"
-      footer={