diff --git a/backend/app/alembic/versions/6e7c33ddf30f_rename_provider_names_in_members_table_.py b/backend/app/alembic/versions/6e7c33ddf30f_rename_provider_names_in_members_table_.py new file mode 100644 index 00000000..52b31beb --- /dev/null +++ b/backend/app/alembic/versions/6e7c33ddf30f_rename_provider_names_in_members_table_.py @@ -0,0 +1,40 @@ +"""Rename provider names in member table to new names + +Revision ID: 6e7c33ddf30f +Revises: 0a354b5c6f6c +Create Date: 2024-07-27 04:29:51.886906 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '6e7c33ddf30f' +down_revision = '0a354b5c6f6c' +branch_labels = None +depends_on = None + + +def upgrade(): + # Mapping of old provider names to new provider names + mapping = { + 'ChatOpenAI': 'openai', + 'ChatAnthropic': 'anthropic', + } + + # Rename each provider name according to the mapping + for old_name, new_name in mapping.items(): + op.execute(f"UPDATE member SET provider = '{new_name}' WHERE provider = '{old_name}'") + + +def downgrade(): + # Mapping of new provider names back to old provider names + mapping = { + 'openai': 'ChatOpenAI', + 'anthropic': 'ChatAnthropic', + } + + # Revert each provider name according to the mapping + for new_name, old_name in mapping.items(): + op.execute(f"UPDATE member SET provider = '{old_name}' WHERE provider = '{new_name}'") \ No newline at end of file diff --git a/backend/app/core/graph/members.py b/backend/app/core/graph/members.py index 4cafc2f8..c5143d1d 100644 --- a/backend/app/core/graph/members.py +++ b/backend/app/core/graph/members.py @@ -1,6 +1,7 @@ from collections.abc import Mapping, Sequence from typing import Annotated, Any +from langchain.chat_models import init_chat_model from langchain.tools.retriever import create_retriever_tool from langchain_core.messages import AIMessage, AnyMessage from langchain_core.output_parsers.openai_tools import JsonOutputKeyToolsParser @@ -16,7 +17,6 @@ from pydantic
import BaseModel, Field from typing_extensions import NotRequired, TypedDict -from app.core.graph.models import all_models from app.core.graph.rag.qdrant import QdrantStore from app.core.graph.skills import managed_skills from app.core.graph.skills.api_tool import dynamic_api_tool @@ -147,12 +147,12 @@ class ReturnTeamState(TypedDict): class BaseNode: def __init__(self, provider: str, model: str, temperature: float): - self.model = all_models[provider]( - model=model, temperature=temperature, streaming=True - ) # type: ignore[call-arg] - self.final_answer_model = all_models[provider]( - model=model, temperature=0, streaming=True - ) # type: ignore[call-arg] + self.model = init_chat_model( + model, model_provider=provider, temperature=temperature, streaming=True + ) + self.final_answer_model = init_chat_model( + model, model_provider=provider, temperature=0, streaming=True + ) def tag_with_name(self, ai_message: AIMessage, name: str) -> AIMessage: """Tag a name to the AI message""" diff --git a/backend/app/core/graph/models.py b/backend/app/core/graph/models.py deleted file mode 100644 index ca479beb..00000000 --- a/backend/app/core/graph/models.py +++ /dev/null @@ -1,14 +0,0 @@ -from langchain_anthropic import ChatAnthropic -from langchain_cohere import ChatCohere -from langchain_google_genai import ChatGoogleGenerativeAI -from langchain_openai import ChatOpenAI - -# Define a dictionary to store all models -all_models: dict[ - str, type[ChatOpenAI | ChatAnthropic | ChatCohere | ChatGoogleGenerativeAI] -] = { - "ChatOpenAI": ChatOpenAI, - "ChatAnthropic": ChatAnthropic, - "ChatCohere": ChatCohere, - "ChatGoogleGenerativeAI": ChatGoogleGenerativeAI, -} diff --git a/backend/app/models.py b/backend/app/models.py index 4cd0048d..a7d9eb36 100644 --- a/backend/app/models.py +++ b/backend/app/models.py @@ -251,8 +251,8 @@ class MemberBase(SQLModel): position_x: float position_y: float source: int | None = None - provider: str = "ChatOpenAI" - model: str = "gpt-3.5-turbo" + 
provider: str = "openai" + model: str = "gpt-4o-mini" temperature: float = 0.7 interrupt: bool = False diff --git a/backend/app/tests/api/routes/test_members.py b/backend/app/tests/api/routes/test_members.py index ee2ccdb0..2d4a0422 100644 --- a/backend/app/tests/api/routes/test_members.py +++ b/backend/app/tests/api/routes/test_members.py @@ -33,7 +33,7 @@ def test_read_members( "position_x": 0.0, "position_y": 0.0, "source": None, - "provider": "ChatOpenAI", + "provider": "openai", "model": "gpt-3.5-turbo", "temperature": 0.7, "interrupt": False, @@ -70,7 +70,7 @@ def test_read_member( "position_x": 0.0, "position_y": 0.0, "source": None, - "provider": "ChatOpenAI", + "provider": "openai", "model": "gpt-3.5-turbo", "temperature": 0.7, "interrupt": False, @@ -107,7 +107,7 @@ def test_create_member( "position_x": 0.0, "position_y": 0.0, "source": None, - "provider": "ChatOpenAI", + "provider": "openai", "model": "gpt-3.5-turbo", "temperature": 0.7, "interrupt": False, @@ -138,7 +138,7 @@ def test_create_member_duplicate_name( "position_x": 0.0, "position_y": 0.0, "source": None, - "provider": "ChatOpenAI", + "provider": "openai", "model": "gpt-3.5-turbo", "temperature": 0.7, "interrupt": False, @@ -160,7 +160,7 @@ def test_create_member_duplicate_name( "position_x": 0.0, "position_y": 0.0, "source": None, - "provider": "ChatOpenAI", + "provider": "openai", "model": "gpt-3.5-turbo", "temperature": 0.7, "interrupt": False, @@ -187,7 +187,7 @@ def test_update_member( "position_x": 0.0, "position_y": 0.0, "source": None, - "provider": "ChatOpenAI", + "provider": "openai", "model": "gpt-3.5-turbo", "temperature": 0.7, "interrupt": False, @@ -226,7 +226,7 @@ def test_delete_member( "position_x": 0.0, "position_y": 0.0, "source": None, - "provider": "ChatOpenAI", + "provider": "openai", "model": "gpt-3.5-turbo", "temperature": 0.7, "interrupt": False, diff --git a/backend/poetry.lock b/backend/poetry.lock index f13b594c..74c8ea54 100644 --- a/backend/poetry.lock +++ 
b/backend/poetry.lock @@ -2102,19 +2102,19 @@ zookeeper = ["kazoo (>=2.8.0)"] [[package]] name = "langchain" -version = "0.2.7" +version = "0.2.8" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain-0.2.7-py3-none-any.whl", hash = "sha256:98e79e0b9a60a9c740b44d5b0135c85f649219308f30d373cf5f10d0efe18b87"}, - {file = "langchain-0.2.7.tar.gz", hash = "sha256:8742f363d2890854501e0075af04fcb470600f201dec251c9bd5841e1990e73d"}, + {file = "langchain-0.2.8-py3-none-any.whl", hash = "sha256:53e7dfe50294a14200f33bec22b4e14cb63857ccf0a5500b0d18b0fd51285d58"}, + {file = "langchain-0.2.8.tar.gz", hash = "sha256:7fecb309e3558cde4e5cf7e9ffb7c1ab3f07121c40a7ff3b0c27135f8120c296"}, ] [package.dependencies] aiohttp = ">=3.8.3,<4.0.0" async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} -langchain-core = ">=0.2.12,<0.3.0" +langchain-core = ">=0.2.19,<0.3.0" langchain-text-splitters = ">=0.2.0,<0.3.0" langsmith = ">=0.1.17,<0.2.0" numpy = [ @@ -5325,4 +5325,4 @@ repair = ["scipy (>=1.6.3)"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "b420b5041f1322c7f2fd8f8e82dd407bd60f83ff1bb9eddfe0e43bf0b06e8c2a" +content-hash = "11ffa28c07960f48a65de06e5756e5c411ab37dd34305968465918ada69557dc" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 78024773..ac3d0e5a 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -29,7 +29,7 @@ langgraph = "0.1.9" langserve = {extras = ["server"], version = "^0.0.51"} langchain-openai = "0.1.17" grandalf = "^0.8" -langchain = "0.2.7" +langchain = "0.2.8" langchain-community = "0.2.7" duckduckgo-search = "6.1.0" wikipedia = "^1.4.0" diff --git a/frontend/src/components/Members/EditMember.tsx b/frontend/src/components/Members/EditMember.tsx index d61d2b50..b42f6a1f 100644 --- a/frontend/src/components/Members/EditMember.tsx +++ 
b/frontend/src/components/Members/EditMember.tsx @@ -52,14 +52,12 @@ const customSelectOption = { // TODO: Place this somewhere else. const AVAILABLE_MODELS = { - ChatOpenAI: ["gpt-4o-mini", "gpt-4o", "gpt-4-turbo", "gpt-3.5-turbo"], - ChatAnthropic: [ + openai: ["gpt-4o-mini", "gpt-4o", "gpt-4-turbo", "gpt-3.5-turbo"], + anthropic: [ "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307", ], - // ChatCohere: ["command"], - // ChatGoogleGenerativeAI: ["gemini-pro"], } type ModelProvider = keyof typeof AVAILABLE_MODELS