fix: parallel image generation error

upgrade: Gemini provider now supports setting the image aspect ratio
This commit is contained in:
Shu Yao 2026-01-16 16:10:28 +08:00
parent 94f557c391
commit 2833fcd7ff
2 changed files with 32 additions and 9 deletions

View File

@ -558,8 +558,22 @@ class GeminiProvider(ModelProvider):
config_kwargs["safety_settings"] = safety_settings
image_config = params.pop("image_config", None)
aspect_ratio = params.pop("aspect_ratio", None)
if aspect_ratio:
if image_config is None:
image_config = {"aspect_ratio": aspect_ratio}
elif isinstance(image_config, dict):
image_config = dict(image_config)
image_config.setdefault("aspect_ratio", aspect_ratio)
elif isinstance(image_config, genai_types.ImageConfig):
try:
image_config.aspect_ratio = aspect_ratio
except Exception:
image_config = {"aspect_ratio": aspect_ratio}
else:
image_config = {"aspect_ratio": aspect_ratio}
if image_config:
config_kwargs["image_config"] = image_config
config_kwargs["image_config"] = self._coerce_image_config(image_config)
audio_config = params.pop("audio_config", None)
if audio_config:
@ -598,6 +612,16 @@ class GeminiProvider(ModelProvider):
except Exception:
return None
def _coerce_image_config(self, image_config: Any) -> Any:
    """Normalize *image_config* into a ``genai_types.ImageConfig`` when possible.

    A plain ``dict`` is converted through the ``ImageConfig`` constructor;
    if construction fails (e.g. unexpected keys), the original dict is
    returned unchanged as a best-effort fallback. Values that are already
    an ``ImageConfig`` — or any other type — pass through untouched.
    """
    if not isinstance(image_config, dict):
        # Already an ImageConfig instance, or an unrecognized type:
        # hand it back as-is rather than guessing.
        return image_config
    try:
        return genai_types.ImageConfig(**image_config)
    except Exception:
        # Best-effort conversion only; keep the raw dict on failure so
        # the caller can still forward it downstream.
        return image_config
def _build_tools(self, tool_specs: List[ToolSpec]) -> List[genai_types.Tool]:
if not tool_specs:
return []

View File

@ -133,7 +133,7 @@ class AgentNodeExecutor(NodeExecutor):
else:
response_message = response_obj.message
self._persist_message_attachments(response_message)
self._persist_message_attachments(response_message, node.id)
final_message: Message | str = response_message
@ -402,7 +402,7 @@ class AgentNodeExecutor(NodeExecutor):
if input_mode is AgentInputMode.MESSAGES:
if isinstance(thinking_result, Message):
self._persist_message_attachments(thinking_result)
self._persist_message_attachments(thinking_result, node.id)
conversation.append(self._clone_with_source(thinking_result, node.id))
else:
self._append_user_message(conversation, thinking_result, node_id=node.id)
@ -828,7 +828,7 @@ class AgentNodeExecutor(NodeExecutor):
return custom_limit
return default_limit
def _persist_message_attachments(self, message: Message) -> None:
def _persist_message_attachments(self, message: Message, node_id: str) -> None:
"""Register attachments produced by model outputs to the attachment store."""
store = self.context.global_state.get("attachment_store")
if store is None:
@ -838,11 +838,11 @@ class AgentNodeExecutor(NodeExecutor):
if not attachment:
continue
try:
self._persist_single_attachment(store, block)
self._persist_single_attachment(store, block, node_id)
except Exception as exc:
raise RuntimeError(f"Failed to persist attachment '{attachment.name or attachment.attachment_id}': {exc}") from exc
def _persist_single_attachment(self, store: Any, block: MessageBlock) -> None:
def _persist_single_attachment(self, store: Any, block: MessageBlock, node_id: str) -> None:
attachment = block.attachment
if attachment is None:
return
@ -859,8 +859,7 @@ class AgentNodeExecutor(NodeExecutor):
return
workspace_root = self.context.global_state.get("python_workspace_root")
node_id = getattr(self, "_current_node_id", None)
if workspace_root is None or node_id is None:
if workspace_root is None or not node_id:
raise RuntimeError("Workspace or node context missing for attachment persistence")
target_dir = workspace_root / "generated" / node_id
@ -971,7 +970,7 @@ class AgentNodeExecutor(NodeExecutor):
if input_mode is AgentInputMode.MESSAGES:
if isinstance(result, Message):
self._persist_message_attachments(result)
self._persist_message_attachments(result, node.id)
self._reset_conversation_with_user_result(conversation, result, node_id=node.id)
else:
self._reset_conversation_with_user_result(conversation, result, node_id=node.id)