Skip to content

Commit ebd909a

Browse files
committed
fix: use 'content' instead of 'context' in generate_answer_node_k_level
The PromptTemplate expects the 'content' variable, but the code was passing 'context', causing a KeyError during graph execution. Fixes #995. Signed-off-by: majiayu000 <1835304752@qq.com>
1 parent 621d3a5 commit ebd909a

File tree

1 file changed

+4
-4
lines changed

1 file changed

+4
-4
lines changed

scrapegraphai/nodes/generate_answer_node_k_level.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -4,10 +4,10 @@
44

55
from typing import List, Optional
66

7-
from langchain_core.prompts import PromptTemplate
87
from langchain_aws import ChatBedrock
98
from langchain_community.chat_models import ChatOllama
109
from langchain_core.output_parsers import JsonOutputParser
10+
from langchain_core.prompts import PromptTemplate
1111
from langchain_core.runnables import RunnableParallel
1212
from langchain_mistralai import ChatMistralAI
1313
from langchain_openai import ChatOpenAI
@@ -151,7 +151,7 @@ def execute(self, state: dict) -> dict:
151151
template=template_chunks_prompt,
152152
input_variables=["format_instructions"],
153153
partial_variables={
154-
"context": chunk.get("document"),
154+
"content": chunk.get("document"),
155155
"chunk_id": i + 1,
156156
},
157157
)
@@ -163,14 +163,14 @@ def execute(self, state: dict) -> dict:
163163

164164
merge_prompt = PromptTemplate(
165165
template=template_merge_prompt,
166-
input_variables=["context", "question"],
166+
input_variables=["content", "question"],
167167
partial_variables={"format_instructions": format_instructions},
168168
)
169169

170170
merge_chain = merge_prompt | self.llm_model
171171
if output_parser:
172172
merge_chain = merge_chain | output_parser
173-
answer = merge_chain.invoke({"context": batch_results, "question": user_prompt})
173+
answer = merge_chain.invoke({"content": batch_results, "question": user_prompt})
174174

175175
state["answer"] = answer
176176

0 commit comments

Comments (0)