Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions external-lib/mcp-use/mcp_use/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,8 +156,8 @@ async def create_session(self, server_name: str, auto_initialize: bool = True) -

# Create the session
session = MCPSession(connector)
if auto_initialize:
await session.initialize()
# if auto_initialize:
# await session.initialize()
self.sessions[server_name] = session

# Add to active sessions
Expand Down
2 changes: 1 addition & 1 deletion external-lib/mcp-use/mcp_use/connectors/http.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ async def connect(self) -> None:

try:
# Try to initialize - this is where streamable HTTP vs SSE difference should show up
await test_client.initialize()
#await test_client.initialize()

# If we get here, streamable HTTP works

Expand Down
2 changes: 1 addition & 1 deletion mcp-chat-agent/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ RUN pip install --no-cache-dir -r requirements.txt
COPY . .

# Install mcp-use package from GitHub
RUN pip install git+https://github.com/wso2/bijira-samples.git@mcp-sample#subdirectory=external-lib/mcp-use
RUN pip install git+https://github.com/wso2/bijira-samples.git#subdirectory=external-lib/mcp-use

# Expose port 5050
EXPOSE 5050
Expand Down
63 changes: 41 additions & 22 deletions mcp-chat-agent/mcp_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
_cached_llm = None
_cached_client = None
_cached_config = None
_cached_agent = None

custom_prompt = """
You are a helpful assistant which helps users in their queries. In these queries try to response using
Expand Down Expand Up @@ -59,37 +60,55 @@ def _create_llm(config):
model_name = config.get("model_name", "")
return ChatOpenAI(model=model_name, api_key=api_key)

async def async_main(query, config=None):
global _cached_llm, _cached_client, _cached_config
async def _initialize_agent(config):
    """(Re)build the cached LLM, MCP client, and agent for *config*.

    NOTE(review): this span was a garbled diff paste that interleaved
    removed lines from the old ``async_main`` with the added function;
    only the new-side implementation is kept here.

    Side effects: rebinds the module globals ``_cached_llm``,
    ``_cached_client``, ``_cached_config`` and ``_cached_agent``.

    Args:
        config: dict with model settings (see ``_create_llm``) and an
            optional ``"mcp"`` sub-dict of server definitions.
    """
    global _cached_llm, _cached_client, _cached_config, _cached_agent

    # Create the LLM instance from the model settings in config.
    _cached_llm = _create_llm(config)

    # Build the MCP client; OAuth2 credentials in the server config are
    # exchanged for tokens before the client is constructed.
    mcp_config = config.get("mcp", {})
    processed_mcp_config = await process_mcp_config_with_oauth2(mcp_config)
    _cached_client = MCPClient.from_dict(processed_mcp_config)

    # Remember the config so async_main can detect changes later.
    _cached_config = config.copy()

    # Create the agent from the llm and client (typo "with from" fixed).
    _cached_agent = MCPAgent(
        llm=_cached_llm,
        client=_cached_client,
        system_prompt=custom_prompt,
        memory_enabled=True,  # keep conversation history across queries
        max_steps=30,
    )

async def async_main(query, config=None):
    """Answer *query* with the cached MCP agent.

    The agent is built lazily on the first call and rebuilt whenever the
    configuration changes; otherwise the existing agent (and its
    conversation memory) is reused.

    Args:
        query: the user's question.
        config: optional configuration dict; ``None`` means empty config.

    Returns:
        The agent's result, stringified.
    """
    global _cached_llm, _cached_client, _cached_config, _cached_agent

    effective_config = {} if config is None else config

    # (Re)initialize only when there is no agent yet or the config differs
    # from the one the cached agent was built with.
    needs_init = _cached_agent is None or _config_changed(
        effective_config, _cached_config
    )
    if needs_init:
        await _initialize_agent(effective_config)

    # Reuse the memory-enabled agent for this query.
    answer = await _cached_agent.run(query)
    return str(answer)

def clear_conversation_history():
    """Drop the cached agent's conversation memory, if an agent exists."""
    global _cached_agent
    if _cached_agent is None:
        # Nothing initialized yet — nothing to clear.
        return
    _cached_agent.clear_conversation_history()

async def cleanup():
    """Close all MCP sessions and invalidate the cached client/agent.

    Defect fixed: the original closed the sessions but left the now-dead
    client (and the agent built on it) cached, so a subsequent
    ``async_main`` call with an unchanged config would reuse a closed
    client. Resetting the cached globals forces a clean re-initialization
    on the next query.
    """
    global _cached_client, _cached_agent, _cached_config
    if _cached_client and hasattr(_cached_client, 'sessions'):
        await _cached_client.close_all_sessions()
    # Invalidate the caches; _cached_config = None matches the module's
    # pristine state, which _config_changed already has to handle on the
    # very first call.
    _cached_client = None
    _cached_agent = None
    _cached_config = None

5 changes: 5 additions & 0 deletions mcp-chat-agent/oauth2_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,10 @@ async def process_mcp_config_with_oauth2(mcp_config: Dict[str, Any]) -> Dict[str
key in oauth2_config and oauth2_config[key]
for key in ["client_id", "client_secret", "token_endpoint"]
):
if oauth2_config['client_id'] == "" or oauth2_config['client_secret'] == "" or oauth2_config['token_endpoint'] == "":
del processed_server["oauth2"]
processed_servers[server_name] = processed_server
continue
logger.info(f"Processing OAuth2 authentication for server: {server_name}")

# Get OAuth2 token
Expand All @@ -112,6 +116,7 @@ async def process_mcp_config_with_oauth2(mcp_config: Dict[str, Any]) -> Dict[str
logger.info(f"OAuth2 token applied to server: {server_name}")
else:
logger.error(f"Failed to get OAuth2 token for server: {server_name}")
del processed_server["oauth2"]

processed_servers[server_name] = processed_server

Expand Down
3 changes: 1 addition & 2 deletions mcp-chat-agent/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,7 @@
flask==2.3.0

httpx==0.27.0

# langchain model dependencies
mcp==1.9.4

# langchain model dependencies
langchain-openai==0.3.27
langchain-anthropic==0.3.16
langchain-mistralai==0.2.10