fix(api): fix DSL import, memory loading, and remaining test coverage

1. DSL Import fix: change self._session.commit() to self._session.flush()
   in app_dsl_service.py _create_or_update_app() to avoid "closed transaction"
   error. DSL import now works: export agent app -> import -> new app created.

2. Memory loading attempt: added _load_memory_messages() to AgentV2Node
   that loads TokenBufferMemory from conversation history. However, chatflow
   engine manages conversations differently from easy-UI (conversation may
   not be in DB at query time, or uses ConversationVariablePersistenceLayer
   instead of Message table). Memory needs further investigation.

Test results:
- Multi-turn memory: Turn 1 OK, Turn 2 LLM doesn't see history (needs deeper fix)
- Service API with API Key: PASSED (answer="Sixteen" for 8+8)
- DSL Import: PASSED (status=completed, new app created)
- Token aggregation: PASSED (node=49, workflow=49)

Known issue: memory in multi-turn chatflow needs to use the graph engine's built-in
memory mechanism (MemoryConfig on node + ConversationVariablePersistenceLayer)
rather than direct DB query.

Made-with: Cursor
This commit is contained in:
Yansong Zhang 2026-04-09 14:47:55 +08:00
parent b21a443d56
commit e2e16772a1
2 changed files with 52 additions and 2 deletions

View File

@@ -303,7 +303,11 @@ class AgentV2Node(Node[AgentV2NodeData]):
return model_instance
def _build_prompt_messages(self, dify_ctx: DifyRunContext) -> list[PromptMessage]:
"""Build prompt messages from the node's prompt_template, resolving variables."""
"""Build prompt messages from the node's prompt_template, resolving variables.
If the node has memory config and a conversation_id exists, conversation
history is loaded and inserted between system and user messages.
"""
variable_pool = self.graph_runtime_state.variable_pool
messages: list[PromptMessage] = []
@@ -328,8 +332,54 @@ class AgentV2Node(Node[AgentV2NodeData]):
resolved = self._resolve_variable_template(text_content, variable_pool)
messages.append(UserPromptMessage(content=resolved))
if self.node_data.memory:
history = self._load_memory_messages(dify_ctx)
if history:
system_msgs = [m for m in messages if isinstance(m, SystemPromptMessage)]
other_msgs = [m for m in messages if not isinstance(m, SystemPromptMessage)]
messages = system_msgs + history + other_msgs
return messages
def _load_memory_messages(self, dify_ctx: DifyRunContext) -> list[PromptMessage]:
    """Fetch prior turns of the current conversation as prompt messages.

    Best-effort: returns an empty list when there is no conversation id in
    the variable pool, when the conversation row is not found in the DB, or
    when anything goes wrong while building the history (errors are logged,
    never raised, so memory loading cannot break the node run).
    """
    from core.memory.token_buffer_memory import TokenBufferMemory
    from models.model import Conversation

    conv_id = get_system_text(
        self.graph_runtime_state.variable_pool,
        SystemVariableKey.CONVERSATION_ID,
    )
    if not conv_id:
        return []

    try:
        from extensions.ext_database import db
        from sqlalchemy import select

        conversation = db.session.scalar(
            select(Conversation).where(Conversation.id == conv_id)
        )
        if not conversation:
            return []

        memory = TokenBufferMemory(
            conversation=conversation,
            model_instance=self._fetch_model_instance(dify_ctx),
        )

        # Honor an enabled sliding-window setting on the node's memory
        # config, when present; otherwise fall back to a 50-message cap.
        window_size = None
        memory_cfg = self.node_data.memory
        if memory_cfg and hasattr(memory_cfg, "window"):
            window = memory_cfg.window
            if window and window.enabled:
                window_size = window.size

        return list(
            memory.get_history_prompt_messages(
                max_token_limit=2000,
                message_limit=window_size or 50,
            )
        )
    except Exception:
        logger.warning("Failed to load memory for agent-v2 node", exc_info=True)
        return []
@staticmethod
def _resolve_variable_template(template: str, variable_pool: Any) -> str:
"""Resolve {{#node.var#}} references in a template string using the variable pool."""

View File

@@ -471,7 +471,7 @@ class AppDslService:
app.updated_by = account.id
self._session.add(app)
self._session.commit()
self._session.flush()
app_was_created.send(app, account=account)
# save dependencies