Skip to content

Commit 6c1e21d

Browse files
author
Erick Friis
authored
core: basemessage.text() (langchain-ai#29078)
1 parent e2ba336 commit 6c1e21d

File tree

10 files changed

+87
-8
lines changed

10 files changed

+87
-8
lines changed

libs/cli/langchain_cli/integration_template/integration_template/chat_models.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ class Chat__ModuleName__(BaseChatModel):
8686
.. code-block:: python
8787
8888
for chunk in llm.stream(messages):
89-
print(chunk)
89+
print(chunk.text(), end="")
9090
9191
.. code-block:: python
9292

libs/core/langchain_core/messages/base.py

+21
Original file line numberDiff line numberDiff line change
@@ -92,6 +92,27 @@ def get_lc_namespace(cls) -> list[str]:
9292
"""
9393
return ["langchain", "schema", "messages"]
9494

95+
def text(self) -> str:
    """Get the text content of the message.

    Concatenates every piece of plain text in ``content``: the whole value
    when it is a string, otherwise each string element and each
    ``{"type": "text", "text": ...}`` block (with a string ``text`` value)
    in list order. All other blocks (tool use, images, etc.) are skipped.

    Returns:
        The text content of the message ("" when there is none).
    """
    if isinstance(self.content, str):
        return self.content

    # content must be a list here; collect the textual pieces in order.
    parts: list[str] = []
    for block in self.content:
        if isinstance(block, str):
            parts.append(block)
        elif block.get("type") == "text" and isinstance(block.get("text"), str):
            parts.append(block["text"])
    return "".join(parts)
115+
95116
def __add__(self, other: Any) -> ChatPromptTemplate:
96117
"""Concatenate this message with another message."""
97118
from langchain_core.prompts.chat import ChatPromptTemplate

libs/core/tests/unit_tests/test_messages.py

+58
Original file line numberDiff line numberDiff line change
@@ -1033,3 +1033,61 @@ def test_tool_message_tool_call_id() -> None:
10331033
ToolMessage("foo", tool_call_id=uuid.uuid4())
10341034
ToolMessage("foo", tool_call_id=1)
10351035
ToolMessage("foo", tool_call_id=1.0)
1036+
1037+
1038+
def test_message_text() -> None:
    # partitions:
    # message types: [ai], [human], [system], [tool]
    # content types: [str], [list[str]], [list[dict]], [list[str | dict]]
    # content: [empty], [single element], [multiple elements]
    # content dict types: [text], [not text], [no type]

    # plain string content is returned as-is
    assert HumanMessage(content="foo").text() == "foo"

    # empty list and list-of-str content
    assert AIMessage(content=[]).text() == ""
    assert AIMessage(content=["foo", "bar"]).text() == "foobar"

    # mixed text / tool_use blocks: only the text block contributes
    thinking_blocks = [
        {"type": "text", "text": "<thinking>thinking...</thinking>"},
        {
            "type": "tool_use",
            "id": "toolu_01A09q90qw90lq917835lq9",
            "name": "get_weather",
            "input": {"location": "San Francisco, CA"},
        },
    ]
    assert AIMessage(content=thinking_blocks).text() == (
        "<thinking>thinking...</thinking>"
    )

    # dict and bare-str blocks concatenate in order
    assert (
        SystemMessage(content=[{"type": "text", "text": "foo"}, "bar"]).text()
        == "foobar"
    )

    # non-text blocks (e.g. images) are dropped
    weather_blocks = [
        {"type": "text", "text": "15 degrees"},
        {
            "type": "image",
            "source": {
                "type": "base64",
                "media_type": "image/jpeg",
                "data": "/9j/4AAQSkZJRg...",
            },
        },
    ]
    assert ToolMessage(content=weather_blocks, tool_call_id="1").text() == "15 degrees"

    # a dict missing "type": "text" is ignored
    assert (
        AIMessage(content=[{"text": "hi there"}, "hi"]).text() == "hi"
    )  # missing type: text
    assert AIMessage(content=[{"type": "nottext", "text": "hi"}]).text() == ""
    assert AIMessage(content=[]).text() == ""

    # tool calls with empty string content yield empty text
    assert (
        AIMessage(
            content="", tool_calls=[create_tool_call(name="a", args={"b": 1}, id=None)]
        ).text()
        == ""
    )

libs/partners/anthropic/langchain_anthropic/chat_models.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -368,7 +368,7 @@ class ChatAnthropic(BaseChatModel):
368368
.. code-block:: python
369369
370370
for chunk in llm.stream(messages):
371-
print(chunk)
371+
print(chunk.text(), end="")
372372
373373
.. code-block:: python
374374

libs/partners/deepseek/langchain_deepseek/chat_models.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ class ChatDeepSeek(BaseChatOpenAI):
7070
.. code-block:: python
7171
7272
for chunk in llm.stream(messages):
73-
print(chunk)
73+
print(chunk.text(), end="")
7474
7575
.. code-block:: python
7676

libs/partners/groq/langchain_groq/chat_models.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -172,7 +172,7 @@ class ChatGroq(BaseChatModel):
172172
.. code-block:: python
173173
174174
for chunk in llm.stream(messages):
175-
print(chunk)
175+
print(chunk.text(), end="")
176176
177177
.. code-block:: python
178178

libs/partners/ollama/langchain_ollama/chat_models.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -228,7 +228,7 @@ class ChatOllama(BaseChatModel):
228228
("human", "Return the words Hello World!"),
229229
]
230230
for chunk in llm.stream(messages):
231-
print(chunk)
231+
print(chunk.text(), end="")
232232
233233
234234
.. code-block:: python

libs/partners/openai/langchain_openai/chat_models/azure.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -183,7 +183,7 @@ class AzureChatOpenAI(BaseChatOpenAI):
183183
.. code-block:: python
184184
185185
for chunk in llm.stream(messages):
186-
print(chunk)
186+
print(chunk.text(), end="")
187187
188188
.. code-block:: python
189189

libs/partners/openai/langchain_openai/chat_models/base.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1616,7 +1616,7 @@ class ChatOpenAI(BaseChatOpenAI): # type: ignore[override]
16161616
.. code-block:: python
16171617
16181618
for chunk in llm.stream(messages):
1619-
print(chunk)
1619+
print(chunk.text(), end="")
16201620
16211621
.. code-block:: python
16221622

libs/partners/xai/langchain_xai/chat_models.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,7 @@ class ChatXAI(BaseChatOpenAI): # type: ignore[override]
9090
.. code-block:: python
9191
9292
for chunk in llm.stream(messages):
93-
print(chunk)
93+
print(chunk.text(), end="")
9494
9595
.. code-block:: python
9696

0 commit comments

Comments
 (0)