Commit 472f8b2

Merge branch 'feat/workflow-backend' into deploy/dev
takatost committed Mar 21, 2024
2 parents: 700ac35 + 72818e9
Showing 1 changed file with 5 additions and 1 deletion.
api/core/workflow/nodes/llm/llm_node.py (5 additions, 1 deletion)

@@ -73,6 +73,8 @@ def _run(self, variable_pool: VariablePool) -> NodeRunResult:
         # fetch prompt messages
         prompt_messages, stop = self._fetch_prompt_messages(
             node_data=node_data,
+            query=variable_pool.get_variable_value(['sys', SystemVariable.QUERY.value])
+            if node_data.memory else None,
             inputs=inputs,
             files=files,
             context=context,
@@ -391,6 +393,7 @@ def _fetch_memory(self, node_data_memory: Optional[MemoryConfig],
         return memory

     def _fetch_prompt_messages(self, node_data: LLMNodeData,
+                               query: Optional[str],
                                inputs: dict[str, str],
                                files: list[FileVar],
                                context: Optional[str],
@@ -400,6 +403,7 @@ def _fetch_prompt_messages(self, node_data: LLMNodeData,
         """
         Fetch prompt messages
         :param node_data: node data
+        :param query: query
         :param inputs: inputs
         :param files: files
         :param context: context
@@ -411,7 +415,7 @@ def _fetch_prompt_messages(self, node_data: LLMNodeData,
         prompt_messages = prompt_transform.get_prompt(
             prompt_template=node_data.prompt_template,
             inputs=inputs,
-            query='',
+            query=query if query else '',
             files=files,
             context=context,
             memory_config=node_data.memory,
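For context on what this merge changes: the LLM node now threads the conversation query from the variable pool into prompt assembly, but only when the node has memory configured; otherwise it passes None, and _fetch_prompt_messages falls back to an empty string so get_prompt behaves as before. Below is a minimal, self-contained sketch of that pattern; SystemVariable and VariablePool here are simplified stand-ins for the real classes in this repository, not their actual implementations.

from enum import Enum
from typing import Optional


class SystemVariable(Enum):
    # Simplified stand-in: only the member this commit references.
    QUERY = 'query'


class VariablePool:
    # Simplified stand-in: the real class resolves workflow variables by
    # selector; a flat dict keyed by the selector tuple is enough here.
    def __init__(self, values: dict[tuple[str, ...], str]):
        self._values = values

    def get_variable_value(self, selector: list[str]) -> Optional[str]:
        return self._values.get(tuple(selector))


def resolve_query(variable_pool: VariablePool, memory_enabled: bool) -> Optional[str]:
    # Mirrors the diff: fetch sys.query only when memory is configured,
    # otherwise return None so no query is injected into the prompt.
    if memory_enabled:
        return variable_pool.get_variable_value(['sys', SystemVariable.QUERY.value])
    return None


pool = VariablePool({('sys', 'query'): 'What did I ask earlier?'})
print(resolve_query(pool, memory_enabled=True))   # What did I ask earlier?
print(resolve_query(pool, memory_enabled=False))  # None

Downstream, query=query if query else '' preserves the old behavior (an empty query string) whenever memory is disabled or the sys.query variable is absent.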
