
Commit

feat: Add unit tests for run_flow_from_json with fake environment variables (#4015)

* Add tests for run_flow_from_json with fake environment variables

- Implemented test_run_flow_with_fake_env to validate flow execution with a fake .env file.
- Added test_run_flow_with_fake_env_TWEAKS to check flow execution using environment variables loaded from the fake .env file.

* Replace string values in tweaks with their corresponding environment variable values

- Implemented a function to recursively traverse the tweaks dictionary and replace string values that name an environment variable with the value from the provided environment variables.

* updated to use a better way to load the test JSON file

* [autofix.ci] apply automated fixes

* [autofix.ci] apply automated fixes

* refactor: improve test readability and consistency in load tests

- Renamed variable `TWEAKS` to `tweaks_dict` for clarity and consistency across tests.
- Updated test function names to follow a consistent naming convention.
- Enhanced comments for better understanding of test intentions.
- Minor formatting adjustments to improve code readability.

* feat: add aload_flow_from_json and arun_flow_from_json to module exports

* fix: correct file path handling in aload_flow_from_json function

* fix: improve environment variable handling in aload_flow_from_json function

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: Gabriel Luiz Freitas Almeida <[email protected]>
4 people authored Jan 31, 2025
1 parent e26b411 commit 0514d11
Showing 6 changed files with 91 additions and 10 deletions.
3 changes: 2 additions & 1 deletion src/backend/base/langflow/load/__init__.py
@@ -1,11 +1,12 @@
from .load import aload_flow_from_json, arun_flow_from_json, load_flow_from_json, run_flow_from_json
from .utils import get_flow, upload_file
from .utils import get_flow, replace_tweaks_with_env, upload_file

__all__ = [
"aload_flow_from_json",
"arun_flow_from_json",
"get_flow",
"load_flow_from_json",
"replace_tweaks_with_env",
"run_flow_from_json",
"upload_file",
]
14 changes: 9 additions & 5 deletions src/backend/base/langflow/load/load.py
@@ -1,13 +1,14 @@
import asyncio
import json
from io import StringIO
from pathlib import Path

from aiofile import async_open
from dotenv import load_dotenv
from dotenv import dotenv_values
from loguru import logger

from langflow.graph import Graph
from langflow.graph.schema import RunOutputs
from langflow.load.utils import replace_tweaks_with_env
from langflow.logging.logger import configure
from langflow.processing.process import process_tweaks, run_graph
from langflow.utils.async_helpers import run_until_complete
@@ -49,14 +50,17 @@ async def aload_flow_from_json(
configure(log_level=log_level, log_file=log_file_path, disable=disable_logs, async_file=True)

# override env variables with .env file
if env_file:
await asyncio.to_thread(load_dotenv, env_file, override=True)
if env_file and tweaks is not None:
async with async_open(Path(env_file), encoding="utf-8") as f:
content = await f.read()
env_vars = dotenv_values(stream=StringIO(content))
tweaks = replace_tweaks_with_env(tweaks=tweaks, env_vars=env_vars)

# Update settings with cache and components path
await update_settings(cache=cache)

if isinstance(flow, str | Path):
async with async_open(Path(flow).name, encoding="utf-8") as f:
async with async_open(Path(flow), encoding="utf-8") as f:
content = await f.read()
flow_graph = json.loads(content)
# If input is a dictionary, assume it's a JSON object
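The change above parses the .env file into a plain dictionary with dotenv_values(stream=...) rather than loading it into the process environment with load_dotenv(override=True). A minimal sketch of that parsing step, using the TEST_OP variable from the tests below for illustration:

from io import StringIO

from dotenv import dotenv_values

content = "TEST_OP=TESTWORKS\n"  # example .env content, matching the fake_env_file fixture below
env_vars = dotenv_values(stream=StringIO(content))  # parses the content without touching os.environ
print(dict(env_vars))  # {'TEST_OP': 'TESTWORKS'}

Because the variables never reach os.environ, they only affect tweak values that name one of them; the rest of the process environment is left untouched.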
27 changes: 25 additions & 2 deletions src/backend/base/langflow/load/utils.py
@@ -99,5 +99,28 @@ def get_flow(url: str, flow_id: str):
msg = f"Error retrieving flow: {e}"
raise UploadError(msg) from e

msg = f"Error retrieving flow: {response.status_code}"
raise UploadError(msg)

def replace_tweaks_with_env(tweaks: dict, env_vars: dict) -> dict:
"""Replace keys in the tweaks dictionary with their corresponding environment variable values.
This function recursively traverses the tweaks dictionary and replaces any string keys
with their values from the provided environment variables. If a key's value is a dictionary,
the function will call itself to handle nested dictionaries.
Args:
tweaks (dict): A dictionary containing keys that may correspond to environment variable names.
env_vars (dict): A dictionary of environment variables where keys are variable names
and values are their corresponding values.
Returns:
dict: The updated tweaks dictionary with keys replaced by their environment variable values.
"""
for key, value in tweaks.items():
if isinstance(value, dict):
# Recursively replace in nested dictionaries
tweaks[key] = replace_tweaks_with_env(value, env_vars)
elif isinstance(value, str):
env_value = env_vars.get(value) # Get the value from the provided environment variables
if env_value is not None:
tweaks[key] = env_value
return tweaks
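A short usage sketch of the new helper, as exported from langflow.load, using the component id from the test flow added in this commit; how the tweaks and env_vars dictionaries are built here is an assumption for illustration:

from langflow.load import replace_tweaks_with_env

env_vars = {"TEST_OP": "TESTWORKS"}  # e.g. the result of dotenv_values on the fake .env file
tweaks = {"Secret-zIbKs": {"secret_key_input": "TEST_OP"}}  # string value names an env variable

resolved = replace_tweaks_with_env(tweaks=tweaks, env_vars=env_vars)
print(resolved)  # {'Secret-zIbKs': {'secret_key_input': 'TESTWORKS'}}

The function updates nested dictionaries in place and also returns the mapping; string values with no matching environment variable are left unchanged.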
1 change: 1 addition & 0 deletions src/backend/tests/conftest.py
@@ -103,6 +103,7 @@ def pytest_configure(config):
pytest.VECTOR_STORE_PATH = data_path / "Vector_store.json"
pytest.SIMPLE_API_TEST = data_path / "SimpleAPITest.json"
pytest.MEMORY_CHATBOT_NO_LLM = data_path / "MemoryChatbotNoLLM.json"
pytest.ENV_VARIABLE_TEST = data_path / "env_variable_test.json"
pytest.LOOP_TEST = data_path / "LoopTest.json"
pytest.CODE_WITH_SYNTAX_ERROR = """
def get_text():
1 change: 1 addition & 0 deletions src/backend/tests/data/env_variable_test.json
@@ -0,0 +1 @@
{"id":"a7003613-8243-4f71-800c-6be1c4065518","data":{"nodes":[{"id":"Secret-zIbKs","type":"genericNode","position":{"x":397.9312192693087,"y":262.8483455882353},"data":{"type":"Secret","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.custom import Component\nfrom langflow.io import SecretStrInput, Output\nfrom langflow.schema.message import Message\n\n\nclass SecretComponent(Component):\n display_name = \"SecretComponent\"\n description = \"SECURE.\"\n icon = \"lock\"\n name = \"Secret\"\n\n inputs = [\n SecretStrInput(\n name=\"secret_key_input\",\n display_name=\"Secret Key\",\n info=\"The Secret to be reveald.\",\n required=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Secret\", name=\"text\", method=\"text_response\"),\n ]\n\n def text_response(self) -> Message:\n self.log(self.secret_key_input)\n message = Message(\n text=self.secret_key_input,\n )\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"secret_key_input":{"load_from_db":false,"required":true,"placeholder":"","show":true,"name":"secret_key_input","value":"","display_name":"Secret Key","advanced":false,"input_types":["Message"],"dynamic":false,"info":"The Secret to be reveald.","title_case":false,"password":true,"type":"str","_input_type":"SecretStrInput"}},"description":"SECURE.","icon":"lock","base_classes":["Message"],"display_name":"SecretComponent","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"text","display_name":"Secret","method":"text_response","value":"__UNDEFINED__","cache":true}],"field_order":["secret_key_input"],"beta":false,"edited":true,"metadata":{},"lf_version":"1.0.18"},"id":"Secret-zIbKs"},"selected":false,"width":384,"height":289,"positionAbsolute":{"x":397.9312192693087,"y":262.8483455882353},"dragging":false},{"id":"ChatOutput-u9cPC","type":"genericNode","position":{"x":863,"y":265.171875},"data":{"type":"ChatOutput","node":{"template":{"_type":"Component","code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n 
name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":true,"dynamic":true,"info":"","load_from_db":false,"title_case":false},"data_template":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"data_template","value":"{text}","display_name":"Data Template","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"input_value":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"input_value","value":"","display_name":"Text","advanced":false,"input_types":["Message"],"dynamic":false,"info":"Message to be passed as output.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"sender":{"trace_as_metadata":true,"options":["Machine","User"],"combobox":false,"required":false,"placeholder":"","show":true,"name":"sender","value":"Machine","display_name":"Sender Type","advanced":true,"dynamic":false,"info":"Type of sender.","title_case":false,"type":"str","_input_type":"DropdownInput"},"sender_name":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"sender_name","value":"AI","display_name":"Sender Name","advanced":true,"input_types":["Message"],"dynamic":false,"info":"Name of the sender.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"session_id":{"trace_as_input":true,"trace_as_metadata":true,"load_from_db":false,"list":false,"required":false,"placeholder":"","show":true,"name":"session_id","value":"","display_name":"Session ID","advanced":true,"input_types":["Message"],"dynamic":false,"info":"The session ID of the chat. 
If empty, the current session ID parameter will be used.","title_case":false,"type":"str","_input_type":"MessageTextInput"},"should_store_message":{"trace_as_metadata":true,"list":false,"required":false,"placeholder":"","show":true,"name":"should_store_message","value":true,"display_name":"Store Messages","advanced":true,"dynamic":false,"info":"Store the message in the history.","title_case":false,"type":"bool","_input_type":"BoolInput"}},"description":"Display a chat message in the Playground.","icon":"ChatOutput","base_classes":["Message"],"display_name":"Chat Output","documentation":"","custom_fields":{},"output_types":[],"pinned":false,"conditional_paths":[],"frozen":false,"outputs":[{"types":["Message"],"selected":"Message","name":"message","display_name":"Message","method":"message_response","value":"__UNDEFINED__","cache":true}],"field_order":["input_value","should_store_message","sender","sender_name","session_id","data_template"],"beta":false,"edited":false,"metadata":{},"lf_version":"1.0.18"},"id":"ChatOutput-u9cPC"},"selected":false,"width":384,"height":289}],"edges":[{"source":"Secret-zIbKs","sourceHandle":"{œdataTypeœ:œSecretœ,œidœ:œSecret-zIbKsœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}","target":"ChatOutput-u9cPC","targetHandle":"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-u9cPCœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","data":{"targetHandle":{"fieldName":"input_value","id":"ChatOutput-u9cPC","inputTypes":["Message"],"type":"str"},"sourceHandle":{"dataType":"Secret","id":"Secret-zIbKs","name":"text","output_types":["Message"]}},"id":"reactflow__edge-Secret-zIbKs{œdataTypeœ:œSecretœ,œidœ:œSecret-zIbKsœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-u9cPC{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-u9cPCœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}","animated":false,"className":""}],"viewport":{"x":11.839003462770279,"y":-83.83942756687532,"zoom":1.0894902752636453}},"description":"Engineered for Excellence, Built for Business.","name":"env_variable_test","last_tested_version":"1.0.18","endpoint_name":"env_variable_test","is_component":false}
55 changes: 53 additions & 2 deletions src/backend/tests/unit/base/load/test_load.py
@@ -1,5 +1,8 @@
import inspect
import os

import pytest
from dotenv import load_dotenv
from langflow.load import run_flow_from_json


@@ -26,5 +29,53 @@ def test_run_flow_from_json_params():
params = func_spec.args + func_spec.kwonlyargs
assert expected_params.issubset(params), "Not all expected parameters are present in run_flow_from_json"

# TODO: Add tests by loading a flow and running it need to test with fake llm and check if it returns the
# correct output
# TODO: Add tests by loading a flow and running it need to test with fake llm and check if it
# returns the correct output


@pytest.fixture
def fake_env_file(tmp_path):
# Create a fake .env file
env_file = tmp_path / ".env"
env_file.write_text("TEST_OP=TESTWORKS")
return env_file


def test_run_flow_with_fake_env(fake_env_file):
# Load the flow from the JSON file
# flow_file = Path("src/backend/tests/data/env_variable_test.json")
flow_file = pytest.ENV_VARIABLE_TEST
tweaks_dict = {"Secret-zIbKs": {"secret_key_input": "TEST_OP"}}

# Run the flow from JSON, providing the fake env file
result = run_flow_from_json(
flow=flow_file,
input_value="some_input_value",
env_file=str(fake_env_file), # Pass the path of the fake env file
tweaks=tweaks_dict,
)
# Extract and check the output data
output_data = result[0].outputs[0].results["message"].data["text"]
assert output_data == "TESTWORKS"


def test_run_flow_with_fake_env_tweaks(fake_env_file):
# Load the flow from the JSON file
# flow_file = Path("src/backend/tests/data/env_variable_test.json")
flow_file = pytest.ENV_VARIABLE_TEST

# Load env file and set up tweaks

load_dotenv(str(fake_env_file))
tweaks = {
"Secret-zIbKs": {"secret_key_input": os.environ["TEST_OP"]},
}
# Run the flow from JSON without passing the env_file
result = run_flow_from_json(
flow=flow_file,
input_value="some_input_value",
tweaks=tweaks,
)
# Extract and check the output data
output_data = result[0].outputs[0].results["message"].data["text"]
assert output_data == "TESTWORKS"
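The commit also exports the async entry points aload_flow_from_json and arun_flow_from_json, which these tests do not exercise. A hedged sketch of driving the async variant, assuming its parameters mirror run_flow_from_json (flow, input_value, env_file, tweaks):

import asyncio

from langflow.load import arun_flow_from_json


async def main():
    # Assumption: arun_flow_from_json accepts the same arguments as run_flow_from_json.
    results = await arun_flow_from_json(
        flow="src/backend/tests/data/env_variable_test.json",
        input_value="some_input_value",
        env_file=".env",  # hypothetical .env file defining TEST_OP, as in the fixture above
        tweaks={"Secret-zIbKs": {"secret_key_input": "TEST_OP"}},
    )
    print(results[0].outputs[0].results["message"].data["text"])  # expected: TESTWORKS


asyncio.run(main())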
