LangGraph example #128

Open
ahuang11 opened this issue Feb 18, 2024 · 0 comments
@ahuang11 (Collaborator) commented:

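Here is a minimal LangGraph + Panel example: a two-node critique/revise loop built with StateGraph and streamed into a pn.chat.ChatInterface. The critique node checks whether the draft fits the target read time; if not, the revise node rewrites it and the loop repeats until the critique says `end`.
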
import os
import operator
from typing import TypedDict, Annotated, Sequence

from langchain_openai.chat_models import ChatOpenAI
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
from langgraph.graph import StateGraph, END
import panel as pn

pn.extension()
os.environ["LANGCHAIN_TRACING_V2"] = "true"


class AgentState(TypedDict):
    messages: Annotated[Sequence[BaseMessage], operator.add]
    read_time_mins: int


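# Revision node: rewrites the latest draft based on the critique's feedback.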
async def revise(state: AgentState):
    instructions = "Revise the message accordingly."
    messages = [SystemMessage(instructions)] + state["messages"][-2:]
    response = await model.ainvoke(messages)
    return {"messages": [response]}


# Critique node: checks whether the draft fits the target read time.
async def critique(state: AgentState):
    read_time_mins = state["read_time_mins"]
    instructions = (
        f"Read the message and determine whether it can be read in "
        f"{read_time_mins} minutes by a slow reader. If not, suggest what can "
        f"be done to make it roughly a {read_time_mins}-minute read that keeps "
        f"the important bits, no more, no less. If it already fits, say `end`."
    )
    messages = [SystemMessage(instructions)] + state["messages"][-2:]
    response = await model.ainvoke(messages)
    return {"messages": [response]}


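# Conditional edge: keep revising until the critique responds with `end`.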
def continue_revising(state: AgentState):
    last_message = state["messages"][-1].content
    return "end" not in last_message.lower()


async def respond(content: str, user: str, instance: pn.chat.ChatInterface):
    # Stream graph updates into the chat; each chunk maps a node name to its output.
    inputs = {"messages": [HumanMessage(content)], "read_time_mins": slider.value}
    async for chunk in app.astream(inputs):
        for node, output in chunk.items():
            message = output["messages"][-1].content
            if message != "end" and node != "__end__":
                instance.stream(user=node.title(), value=message)


# add components
workflow = StateGraph(AgentState)
workflow.add_node("critique", critique)
workflow.add_node("revise", revise)
# add connections
workflow.set_entry_point("critique")
workflow.add_edge("revise", "critique")
workflow.add_conditional_edges(
    "critique", continue_revising, {True: "revise", False: END}
)
app = workflow.compile()

model = ChatOpenAI()
slider = pn.widgets.IntSlider(name="Read Time (mins)", start=1, end=10, value=1)
interface = pn.chat.ChatInterface(callback=respond, header=pn.Row(slider, width=300))
interface.servable()
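
To try it, save the script (e.g. as app.py; the filename is up to you), make sure OPENAI_API_KEY is set (and LANGCHAIN_API_KEY if you keep LangSmith tracing enabled, otherwise drop the LANGCHAIN_TRACING_V2 line), then run:

panel serve app.py

and open the printed URL to chat with the critique/revise loop.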