Fix migration problem (#1694)
* Update Dockerfiles to include user creation and use --user flag for pip install

* Add JavaScriptMIMETypeMiddleware to main.py

* Update constants.py and run.py files

* Update package versions in poetry.lock and pyproject.toml files

* Refactor Dockerfile to optimize image building process

* Fix import error in main.py

* Update Dockerfiles to use logspace/langflow image

* Fix decryption error handling in get_user_store_api_key function

* Add error logging to JavaScriptMIMETypeMiddleware in main.py

* Fix error logging in main.py

* Fix error logging and datetime type in database migrations

* Update openai component

* Update package versions for boto3 and botocore
ogabrielluiz authored Apr 12, 2024
1 parent 7022c81 commit 23f374d
Showing 13 changed files with 162 additions and 25 deletions.
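
Note on the "Add JavaScriptMIMETypeMiddleware to main.py" item in the commit message: main.py is not among the files rendered below, so the following is only a hypothetical sketch of what such a middleware typically looks like in a FastAPI/Starlette app, assuming its purpose is to serve .js assets with the correct Content-Type. Only the class name comes from this commit; everything else here is an assumption.

    # Hypothetical sketch; the real implementation in main.py is not part of this rendered diff.
    from starlette.middleware.base import BaseHTTPMiddleware
    from starlette.requests import Request


    class JavaScriptMIMETypeMiddleware(BaseHTTPMiddleware):
        async def dispatch(self, request: Request, call_next):
            response = await call_next(request)
            # Ensure bundled .js assets are served as JavaScript; browsers
            # MIME-type check module scripts and refuse text/plain.
            if request.url.path.endswith(".js"):
                response.headers["Content-Type"] = "application/javascript"
            return response

    # Registered on the app in the usual FastAPI way:
    # app.add_middleware(JavaScriptMIMETypeMiddleware)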
16 changes: 8 additions & 8 deletions poetry.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "langflow"
-version = "1.0.0a20"
+version = "1.0.0a21"
 description = "A Python package with a built-in web application"
 authors = ["Logspace <[email protected]>"]
 maintainers = [
@@ -0,0 +1,130 @@
"""Fix date times again
Revision ID: 4e5980a44eaa
Revises: 79e675cb6752
Create Date: 2024-04-12 18:11:06.454037
"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op
from loguru import logger
from sqlalchemy.dialects import postgresql
from sqlalchemy.engine.reflection import Inspector

# revision identifiers, used by Alembic.
revision: str = "4e5980a44eaa"
down_revision: Union[str, None] = "79e675cb6752"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)  # type: ignore
    table_names = inspector.get_table_names()
    # ### commands auto generated by Alembic - please adjust! ###
    if "apikey" in table_names:
        columns = inspector.get_columns("apikey")
        created_at_column = next((column for column in columns if column["name"] == "created_at"), None)
        if created_at_column is not None and isinstance(created_at_column["type"], postgresql.TIMESTAMP):
            with op.batch_alter_table("apikey", schema=None) as batch_op:
                batch_op.alter_column(
                    "created_at",
                    existing_type=postgresql.TIMESTAMP(),
                    type_=sa.DateTime(timezone=True),
                    existing_nullable=False,
                )
        else:
            if created_at_column is None:
                logger.warning("Column 'created_at' not found in table 'apikey'")
            else:
                logger.warning(f"Column 'created_at' has type {created_at_column['type']} in table 'apikey'")
    if "variable" in table_names:
        columns = inspector.get_columns("variable")
        created_at_column = next((column for column in columns if column["name"] == "created_at"), None)
        updated_at_column = next((column for column in columns if column["name"] == "updated_at"), None)
        with op.batch_alter_table("variable", schema=None) as batch_op:
            if created_at_column is not None and isinstance(created_at_column["type"], postgresql.TIMESTAMP):
                batch_op.alter_column(
                    "created_at",
                    existing_type=postgresql.TIMESTAMP(),
                    type_=sa.DateTime(timezone=True),
                    existing_nullable=True,
                )
            else:
                if created_at_column is None:
                    logger.warning("Column 'created_at' not found in table 'variable'")
                else:
                    logger.warning(f"Column 'created_at' has type {created_at_column['type']} in table 'variable'")
            if updated_at_column is not None and isinstance(updated_at_column["type"], postgresql.TIMESTAMP):
                batch_op.alter_column(
                    "updated_at",
                    existing_type=postgresql.TIMESTAMP(),
                    type_=sa.DateTime(timezone=True),
                    existing_nullable=True,
                )
            else:
                if updated_at_column is None:
                    logger.warning("Column 'updated_at' not found in table 'variable'")
                else:
                    logger.warning(f"Column 'updated_at' has type {updated_at_column['type']} in table 'variable'")

    # ### end Alembic commands ###


def downgrade() -> None:
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)  # type: ignore
    table_names = inspector.get_table_names()
    # ### commands auto generated by Alembic - please adjust! ###
    if "variable" in table_names:
        columns = inspector.get_columns("variable")
        created_at_column = next((column for column in columns if column["name"] == "created_at"), None)
        updated_at_column = next((column for column in columns if column["name"] == "updated_at"), None)
        with op.batch_alter_table("variable", schema=None) as batch_op:
            if updated_at_column is not None and isinstance(updated_at_column["type"], sa.DateTime):
                batch_op.alter_column(
                    "updated_at",
                    existing_type=sa.DateTime(timezone=True),
                    type_=postgresql.TIMESTAMP(),
                    existing_nullable=True,
                )
            else:
                if updated_at_column is None:
                    logger.warning("Column 'updated_at' not found in table 'variable'")
                else:
                    logger.warning(f"Column 'updated_at' has type {updated_at_column['type']} in table 'variable'")
            if created_at_column is not None and isinstance(created_at_column["type"], sa.DateTime):
                batch_op.alter_column(
                    "created_at",
                    existing_type=sa.DateTime(timezone=True),
                    type_=postgresql.TIMESTAMP(),
                    existing_nullable=True,
                )
            else:
                if created_at_column is None:
                    logger.warning("Column 'created_at' not found in table 'variable'")
                else:
                    logger.warning(f"Column 'created_at' has type {created_at_column['type']} in table 'variable'")

    if "apikey" in table_names:
        columns = inspector.get_columns("apikey")
        created_at_column = next((column for column in columns if column["name"] == "created_at"), None)
        if created_at_column is not None and isinstance(created_at_column["type"], sa.DateTime):
            with op.batch_alter_table("apikey", schema=None) as batch_op:
                batch_op.alter_column(
                    "created_at",
                    existing_type=sa.DateTime(timezone=True),
                    type_=postgresql.TIMESTAMP(),
                    existing_nullable=False,
                )
        else:
            if created_at_column is None:
                logger.warning("Column 'created_at' not found in table 'apikey'")
            else:
                logger.warning(f"Column 'created_at' has type {created_at_column['type']} in table 'apikey'")

    # ### end Alembic commands ###
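
This new revision can be applied or rolled back through Alembic's Python command API as well as the CLI; a minimal sketch, assuming a standard alembic.ini in the working directory (Langflow's actual migration entry point may differ):

    from alembic import command
    from alembic.config import Config

    # Path to alembic.ini is an assumption; adjust to the project layout.
    config = Config("alembic.ini")

    # Apply all pending migrations, including revision 4e5980a44eaa above.
    command.upgrade(config, "head")

    # To undo only this revision, step back to the one it revises:
    # command.downgrade(config, "79e675cb6752")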
@@ -29,7 +29,7 @@ def upgrade() -> None:
     if "apikey" in table_names:
         columns = inspector.get_columns("apikey")
         created_at_column = next((column for column in columns if column["name"] == "created_at"), None)
-        if created_at_column is not None and created_at_column["type"] == postgresql.TIMESTAMP():
+        if created_at_column is not None and isinstance(created_at_column["type"], postgresql.TIMESTAMP):
             with op.batch_alter_table("apikey", schema=None) as batch_op:
                 batch_op.alter_column(
                     "created_at",
@@ -47,7 +47,7 @@ def upgrade() -> None:
         created_at_column = next((column for column in columns if column["name"] == "created_at"), None)
         updated_at_column = next((column for column in columns if column["name"] == "updated_at"), None)
         with op.batch_alter_table("variable", schema=None) as batch_op:
-            if created_at_column is not None and created_at_column["type"] == postgresql.TIMESTAMP():
+            if created_at_column is not None and isinstance(created_at_column["type"], postgresql.TIMESTAMP):
                 batch_op.alter_column(
                     "created_at",
                     existing_type=postgresql.TIMESTAMP(),
@@ -59,7 +59,7 @@ def upgrade() -> None:
                     logger.warning("Column 'created_at' not found in table 'variable'")
                 else:
                     logger.warning(f"Column 'created_at' has type {created_at_column['type']} in table 'variable'")
-            if updated_at_column is not None and updated_at_column["type"] == postgresql.TIMESTAMP():
+            if updated_at_column is not None and isinstance(updated_at_column["type"], postgresql.TIMESTAMP):
                 batch_op.alter_column(
                     "updated_at",
                     existing_type=postgresql.TIMESTAMP(),
@@ -85,7 +85,7 @@ def downgrade() -> None:
         created_at_column = next((column for column in columns if column["name"] == "created_at"), None)
         updated_at_column = next((column for column in columns if column["name"] == "updated_at"), None)
         with op.batch_alter_table("variable", schema=None) as batch_op:
-            if updated_at_column is not None and updated_at_column["type"] == sa.DateTime(timezone=True):
+            if updated_at_column is not None and isinstance(updated_at_column["type"], sa.DateTime):
                 batch_op.alter_column(
                     "updated_at",
                     existing_type=sa.DateTime(timezone=True),
@@ -97,7 +97,7 @@ def downgrade() -> None:
                     logger.warning("Column 'updated_at' not found in table 'variable'")
                 else:
                     logger.warning(f"Column 'updated_at' has type {updated_at_column['type']} in table 'variable'")
-            if created_at_column is not None and created_at_column["type"] == sa.DateTime(timezone=True):
+            if created_at_column is not None and isinstance(created_at_column["type"], sa.DateTime):
                 batch_op.alter_column(
                     "created_at",
                     existing_type=sa.DateTime(timezone=True),
@@ -113,7 +113,7 @@ def downgrade() -> None:
     if "apikey" in table_names:
         columns = inspector.get_columns("apikey")
         created_at_column = next((column for column in columns if column["name"] == "created_at"), None)
-        if created_at_column is not None and created_at_column["type"] == sa.DateTime(timezone=True):
+        if created_at_column is not None and isinstance(created_at_column["type"], sa.DateTime):
             with op.batch_alter_table("apikey", schema=None) as batch_op:
                 batch_op.alter_column(
                     "created_at",
@@ -161,7 +161,7 @@
"list": false,
"show": true,
"multiline": true,
"value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
"value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-2024-04-09\",\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n",
"fileTypes": [],
"file_path": "",
"password": false,
@@ -222,6 +222,7 @@
 "file_path": "",
 "password": false,
 "options": [
+"gpt-4-turbo-2024-04-09",
 "gpt-4-turbo-preview",
 "gpt-3.5-turbo",
 "gpt-4-0125-preview",