From 5998c260c0c05ebffc7335057448b87d20c0a9bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 18:14:08 -0700 Subject: [PATCH 01/22] build: matching Python versions with pytorch/pytorch:2.8.0-cuda12.8-cudnn9-runtime from GPU image --- .python-version | 2 +- Dockerfile-cpu | 2 +- uv.lock | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.python-version b/.python-version index e4fba21..2c07333 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.12 +3.11 diff --git a/Dockerfile-cpu b/Dockerfile-cpu index e931267..8b9a926 100644 --- a/Dockerfile-cpu +++ b/Dockerfile-cpu @@ -1,4 +1,4 @@ -FROM python:3.12-slim +FROM python:3.11-slim WORKDIR /app diff --git a/uv.lock b/uv.lock index ef96dd3..7ad246a 100644 --- a/uv.lock +++ b/uv.lock @@ -2614,7 +2614,7 @@ wheels = [ [[package]] name = "worker-tetra" -version = "0.6.0" +version = "0.7.0" source = { virtual = "." } dependencies = [ { name = "cloudpickle" }, From 48d3e6c4087d34410713ecff76c341094f180f54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 18:14:38 -0700 Subject: [PATCH 02/22] chore: vscode type-hint helpers --- .vscode/settings.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 3873e4e..733069a 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -6,6 +6,7 @@ "python.testing.pytestEnabled": true, "python.envFile": "${workspaceFolder}/.env", "python.analysis.extraPaths": [ - "${workspaceFolder}/src" + "${workspaceFolder}/src", + "${workspaceFolder}/tetra-rp/src", ] } From 020412dd4409335898582c59f1b9ee00fa94e406 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 20:02:38 -0700 Subject: [PATCH 03/22] refactor: handler --> live_serverless.handler --- src/handler.py | 26 ++----------------- src/live_serverless/__init__.py | 25 ++++++++++++++++++ tests/conftest.py | 2 +- tests/integration/test_handler_integration.py | 3 ++- tests/unit/test_handler.py | 8 +++--- 5 files changed, 34 insertions(+), 30 deletions(-) create mode 100644 src/live_serverless/__init__.py diff --git a/src/handler.py b/src/handler.py index 731da51..0b5412a 100644 --- a/src/handler.py +++ b/src/handler.py @@ -1,34 +1,12 @@ import runpod -from typing import Dict, Any - -from remote_execution import FunctionRequest, FunctionResponse -from remote_executor import RemoteExecutor from logger import setup_logging +from live_serverless import handler + # Initialize logging configuration setup_logging() -async def handler(event: Dict[str, Any]) -> Dict[str, Any]: - """ - RunPod serverless function handler with dependency installation. 
- """ - output: FunctionResponse - - try: - executor = RemoteExecutor() - input_data = FunctionRequest(**event.get("input", {})) - output = await executor.ExecuteFunction(input_data) - - except Exception as error: - output = FunctionResponse( - success=False, - error=f"Error in handler: {str(error)}", - ) - - return output.model_dump() - - # Start the RunPod serverless handler if __name__ == "__main__": runpod.serverless.start({"handler": handler}) diff --git a/src/live_serverless/__init__.py b/src/live_serverless/__init__.py new file mode 100644 index 0000000..cde95da --- /dev/null +++ b/src/live_serverless/__init__.py @@ -0,0 +1,25 @@ +from typing import Dict, Any + +from remote_execution import FunctionRequest, FunctionResponse +from remote_executor import RemoteExecutor + + +async def handler(event: Dict[str, Any]) -> Dict[str, Any]: + """ + RunPod serverless function handler with dependency installation. + """ + output: FunctionResponse + + try: + executor = RemoteExecutor() + input_data = FunctionRequest(**event.get("input", {})) + output = await executor.ExecuteFunction(input_data) + + except Exception as error: + output = FunctionResponse( + success=False, + error=f"Error in handler: {str(error)}", + ) + + return output.model_dump() + diff --git a/tests/conftest.py b/tests/conftest.py index 9bdd27b..1603187 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,7 +3,7 @@ import cloudpickle from unittest.mock import MagicMock from remote_execution import FunctionRequest -from handler import RemoteExecutor +from remote_executor import RemoteExecutor @pytest.fixture diff --git a/tests/integration/test_handler_integration.py b/tests/integration/test_handler_integration.py index 0eca974..282a0c8 100644 --- a/tests/integration/test_handler_integration.py +++ b/tests/integration/test_handler_integration.py @@ -4,8 +4,9 @@ import cloudpickle from pathlib import Path -from handler import handler, RemoteExecutor +from remote_executor import RemoteExecutor from remote_execution import FunctionRequest +from live_serverless import handler class TestHandlerIntegration: diff --git a/tests/unit/test_handler.py b/tests/unit/test_handler.py index 27bfb7b..eac2dd3 100644 --- a/tests/unit/test_handler.py +++ b/tests/unit/test_handler.py @@ -23,7 +23,7 @@ async def test_handler_success(self): } } - with patch("handler.RemoteExecutor") as mock_executor_class: + with patch("live_serverless.RemoteExecutor") as mock_executor_class: mock_executor = AsyncMock() mock_executor_class.return_value = mock_executor mock_executor.ExecuteFunction.return_value = FunctionResponse( @@ -76,7 +76,7 @@ async def test_handler_executor_exception(self): } } - with patch("handler.RemoteExecutor") as mock_executor_class: + with patch("live_serverless.RemoteExecutor") as mock_executor_class: mock_executor_class.side_effect = Exception( "Executor initialization failed" ) @@ -100,7 +100,7 @@ async def test_handler_response_serialization(self): } test_data = {"data": "test"} - with patch("handler.RemoteExecutor") as mock_executor_class: + with patch("live_serverless.RemoteExecutor") as mock_executor_class: mock_executor = AsyncMock() mock_executor_class.return_value = mock_executor mock_executor.ExecuteFunction.return_value = FunctionResponse( @@ -130,7 +130,7 @@ async def test_handler_class_execution(self): } } - with patch("handler.RemoteExecutor") as mock_executor_class: + with patch("live_serverless.RemoteExecutor") as mock_executor_class: mock_executor = AsyncMock() mock_executor_class.return_value = mock_executor 
mock_executor.ExecuteFunction.return_value = FunctionResponse( From 37564202305808e08a6d4f8d553fed26e91fe5f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 20:20:40 -0700 Subject: [PATCH 04/22] build: `make setup` where it applies --- .github/workflows/ci.yml | 25 +++++-------------------- .github/workflows/deploy.yml | 5 +---- Makefile | 5 +++-- 3 files changed, 9 insertions(+), 26 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 38b4a42..f280f9e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -106,10 +106,7 @@ jobs: enable-cache: true - name: Setup dependencies - run: | - uv sync - git submodule update - cp tetra-rp/src/tetra_rp/protos/remote_execution.py src/ + run: make setup - name: Build CPU Docker image uses: docker/build-push-action@v6 @@ -186,10 +183,7 @@ jobs: enable-cache: true - name: Setup dependencies - run: | - uv sync - git submodule update - cp tetra-rp/src/tetra_rp/protos/remote_execution.py src/ + run: make setup - name: Build and push GPU Docker image (main) uses: docker/build-push-action@v6 @@ -239,10 +233,7 @@ jobs: enable-cache: true - name: Setup dependencies - run: | - uv sync - git submodule update - cp tetra-rp/src/tetra_rp/protos/remote_execution.py src/ + run: make setup - name: Build and push CPU Docker image (main) uses: docker/build-push-action@v6 @@ -302,10 +293,7 @@ jobs: enable-cache: true - name: Setup dependencies - run: | - uv sync - git submodule update - cp tetra-rp/src/tetra_rp/protos/remote_execution.py src/ + run: make setup - name: Build and push GPU Docker image (prod) uses: docker/build-push-action@v6 @@ -365,10 +353,7 @@ jobs: enable-cache: true - name: Setup dependencies - run: | - uv sync - git submodule update - cp tetra-rp/src/tetra_rp/protos/remote_execution.py src/ + run: make setup - name: Build and push CPU Docker image (prod) uses: docker/build-push-action@v6 diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 61e631c..1094b59 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -82,10 +82,7 @@ jobs: enable-cache: true - name: Setup dependencies - run: | - uv sync - git submodule update - cp tetra-rp/src/tetra_rp/protos/remote_execution.py src/ + run: make setup - name: Build and push GPU Docker image uses: docker/build-push-action@v6 diff --git a/Makefile b/Makefile index e6c24ed..ee8cc15 100644 --- a/Makefile +++ b/Makefile @@ -32,14 +32,15 @@ clean: # Remove build artifacts and cache files find . -type f -name "*.pyc" -delete find . -type f -name "*.pkl" -delete -setup: dev # Initialize project, sync deps, update submodules +setup: # Initialize project, sync deps, update submodules + uv sync @if [ ! 
-f "tetra-rp/.git" ]; then \ git submodule update --init --recursive; \ fi make protocols protocols: # Copy remote_execution protocol from submodule - cp tetra-rp/src/tetra_rp/protos/remote_execution.py src/ + cp tetra-rp/src/tetra_rp/protos/remote_execution.py src/live_serverless/ build: # Build both GPU and CPU Docker images make build-gpu From 97801479f0e8e6fe703f1155e187d9b77ec2ef2d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 20:26:36 -0700 Subject: [PATCH 05/22] refactor: remote_execution --> live_serverless.remote_execution --- src/class_executor.py | 2 +- src/dependency_installer.py | 2 +- src/function_executor.py | 2 +- src/live_serverless/__init__.py | 2 +- src/remote_executor.py | 2 +- src/subprocess_utils.py | 2 +- tests/conftest.py | 2 +- tests/integration/test_dependency_management.py | 6 +++--- tests/integration/test_handler_integration.py | 2 +- tests/unit/test_cache_sync_manager.py | 2 +- tests/unit/test_class_executor.py | 2 +- tests/unit/test_dependency_installer.py | 2 +- tests/unit/test_function_executor.py | 2 +- tests/unit/test_handler.py | 2 +- tests/unit/test_remote_execution.py | 2 +- tests/unit/test_remote_executor.py | 8 ++++---- 16 files changed, 21 insertions(+), 21 deletions(-) diff --git a/src/class_executor.py b/src/class_executor.py index 9347ed7..08b95aa 100644 --- a/src/class_executor.py +++ b/src/class_executor.py @@ -6,7 +6,7 @@ from datetime import datetime from typing import Dict, Any, Tuple -from remote_execution import FunctionRequest, FunctionResponse +from live_serverless.remote_execution import FunctionRequest, FunctionResponse from serialization_utils import SerializationUtils diff --git a/src/dependency_installer.py b/src/dependency_installer.py index 85c018d..0017e55 100644 --- a/src/dependency_installer.py +++ b/src/dependency_installer.py @@ -4,7 +4,7 @@ import platform from typing import List -from remote_execution import FunctionResponse +from live_serverless.remote_execution import FunctionResponse from constants import LARGE_SYSTEM_PACKAGES, NAMESPACE from subprocess_utils import run_logged_subprocess diff --git a/src/function_executor.py b/src/function_executor.py index 7f94690..5901aad 100644 --- a/src/function_executor.py +++ b/src/function_executor.py @@ -4,7 +4,7 @@ from contextlib import redirect_stdout, redirect_stderr from typing import Dict, Any -from remote_execution import FunctionRequest, FunctionResponse +from live_serverless.remote_execution import FunctionRequest, FunctionResponse from serialization_utils import SerializationUtils diff --git a/src/live_serverless/__init__.py b/src/live_serverless/__init__.py index cde95da..f25ab7a 100644 --- a/src/live_serverless/__init__.py +++ b/src/live_serverless/__init__.py @@ -1,6 +1,6 @@ from typing import Dict, Any -from remote_execution import FunctionRequest, FunctionResponse +from live_serverless.remote_execution import FunctionRequest, FunctionResponse from remote_executor import RemoteExecutor diff --git a/src/remote_executor.py b/src/remote_executor.py index 1577e46..76896b2 100644 --- a/src/remote_executor.py +++ b/src/remote_executor.py @@ -1,7 +1,7 @@ import logging import asyncio from typing import List, Any -from remote_execution import FunctionRequest, FunctionResponse, RemoteExecutorStub +from live_serverless.remote_execution import FunctionRequest, FunctionResponse, RemoteExecutorStub from dependency_installer import DependencyInstaller from function_executor import FunctionExecutor from class_executor import ClassExecutor 
diff --git a/src/subprocess_utils.py b/src/subprocess_utils.py index b2b3e16..e66ca8e 100644 --- a/src/subprocess_utils.py +++ b/src/subprocess_utils.py @@ -11,7 +11,7 @@ import inspect from typing import List, Optional, Any -from remote_execution import FunctionResponse +from live_serverless.remote_execution import FunctionResponse def run_logged_subprocess( diff --git a/tests/conftest.py b/tests/conftest.py index 1603187..06e4368 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,7 +2,7 @@ import base64 import cloudpickle from unittest.mock import MagicMock -from remote_execution import FunctionRequest +from live_serverless.remote_execution import FunctionRequest from remote_executor import RemoteExecutor diff --git a/tests/integration/test_dependency_management.py b/tests/integration/test_dependency_management.py index b77b960..bef4cfe 100644 --- a/tests/integration/test_dependency_management.py +++ b/tests/integration/test_dependency_management.py @@ -1,7 +1,7 @@ import pytest from unittest.mock import patch, AsyncMock from remote_executor import RemoteExecutor -from remote_execution import FunctionRequest, FunctionResponse +from live_serverless.remote_execution import FunctionRequest, FunctionResponse class TestDependencyManagement: @@ -89,7 +89,7 @@ def test_with_deps(): patch.object(executor.function_executor, "execute") as mock_execute, ): # Mock successful dependency installations - from remote_execution import FunctionResponse + from live_serverless.remote_execution import FunctionResponse mock_sys_deps.return_value = FunctionResponse( success=True, stdout="system deps installed" @@ -176,7 +176,7 @@ async def test_dependency_failure_stops_execution(self): patch.object(executor.function_executor, "execute") as mock_execute, ): # Mock failed dependency installation - from remote_execution import FunctionResponse + from live_serverless.remote_execution import FunctionResponse mock_deps.return_value = FunctionResponse( success=False, diff --git a/tests/integration/test_handler_integration.py b/tests/integration/test_handler_integration.py index 282a0c8..2d79eb3 100644 --- a/tests/integration/test_handler_integration.py +++ b/tests/integration/test_handler_integration.py @@ -5,7 +5,7 @@ from pathlib import Path from remote_executor import RemoteExecutor -from remote_execution import FunctionRequest +from live_serverless.remote_execution import FunctionRequest from live_serverless import handler diff --git a/tests/unit/test_cache_sync_manager.py b/tests/unit/test_cache_sync_manager.py index 153c9d7..9cbee67 100644 --- a/tests/unit/test_cache_sync_manager.py +++ b/tests/unit/test_cache_sync_manager.py @@ -3,7 +3,7 @@ from unittest.mock import patch from pathlib import Path from cache_sync_manager import CacheSyncManager -from remote_execution import FunctionResponse +from live_serverless.remote_execution import FunctionResponse @pytest.fixture diff --git a/tests/unit/test_class_executor.py b/tests/unit/test_class_executor.py index 727231d..a0fb3bd 100644 --- a/tests/unit/test_class_executor.py +++ b/tests/unit/test_class_executor.py @@ -5,7 +5,7 @@ from datetime import datetime from class_executor import ClassExecutor -from remote_execution import FunctionRequest +from live_serverless.remote_execution import FunctionRequest class TestClassExecution: diff --git a/tests/unit/test_dependency_installer.py b/tests/unit/test_dependency_installer.py index e30a81c..fd42c38 100644 --- a/tests/unit/test_dependency_installer.py +++ b/tests/unit/test_dependency_installer.py @@ -3,7 +3,7 
@@ from unittest.mock import patch from dependency_installer import DependencyInstaller -from remote_execution import FunctionResponse +from live_serverless.remote_execution import FunctionResponse class TestSystemDependencies: diff --git a/tests/unit/test_function_executor.py b/tests/unit/test_function_executor.py index 815e326..072afe2 100644 --- a/tests/unit/test_function_executor.py +++ b/tests/unit/test_function_executor.py @@ -4,7 +4,7 @@ import cloudpickle from function_executor import FunctionExecutor -from remote_execution import FunctionRequest +from live_serverless.remote_execution import FunctionRequest class TestFunctionExecution: diff --git a/tests/unit/test_handler.py b/tests/unit/test_handler.py index eac2dd3..cc8940c 100644 --- a/tests/unit/test_handler.py +++ b/tests/unit/test_handler.py @@ -5,7 +5,7 @@ import cloudpickle from unittest.mock import patch, AsyncMock from handler import handler -from remote_execution import FunctionResponse +from live_serverless.remote_execution import FunctionResponse class TestHandler: diff --git a/tests/unit/test_remote_execution.py b/tests/unit/test_remote_execution.py index 172ffee..b788abd 100644 --- a/tests/unit/test_remote_execution.py +++ b/tests/unit/test_remote_execution.py @@ -2,7 +2,7 @@ import base64 import cloudpickle from pydantic import ValidationError -from remote_execution import FunctionRequest, FunctionResponse +from live_serverless.remote_execution import FunctionRequest, FunctionResponse class TestFunctionRequest: diff --git a/tests/unit/test_remote_executor.py b/tests/unit/test_remote_executor.py index 379525e..d6da939 100644 --- a/tests/unit/test_remote_executor.py +++ b/tests/unit/test_remote_executor.py @@ -4,7 +4,7 @@ from unittest.mock import Mock, patch, AsyncMock from remote_executor import RemoteExecutor -from remote_execution import FunctionRequest +from live_serverless.remote_execution import FunctionRequest class TestRemoteExecutor: @@ -107,7 +107,7 @@ async def test_execute_function_with_dependencies_orchestration(self): self.executor.function_executor, "execute" ) as mock_execute: # Mock async methods with proper FunctionResponse returns - from remote_execution import FunctionResponse + from live_serverless.remote_execution import FunctionResponse mock_sys_deps_async.return_value = FunctionResponse( success=True, stdout="System deps installed" @@ -146,7 +146,7 @@ async def test_execute_function_dependency_failure_stops_execution(self): self.executor.function_executor, "execute" ) as mock_execute: # Mock async method with FunctionResponse - from remote_execution import FunctionResponse + from live_serverless.remote_execution import FunctionResponse mock_py_deps_async.return_value = FunctionResponse( success=False, error="Package not found" @@ -223,7 +223,7 @@ async def test_hydration_before_installation_with_dependencies(self): new_callable=AsyncMock, ) as mock_sync, ): - from remote_execution import FunctionResponse + from live_serverless.remote_execution import FunctionResponse mock_deps.return_value = FunctionResponse( success=True, stdout="Deps installed" From 96ef6ed0a12ce117d2596b1c39d2587c1102759a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 20:31:26 -0700 Subject: [PATCH 06/22] refactor: remote_executor --> live_serverless.remote_executor --- src/live_serverless/__init__.py | 4 ++-- src/{ => live_serverless}/remote_executor.py | 0 tests/conftest.py | 2 +- tests/integration/test_dependency_management.py | 2 +- tests/integration/test_handler_integration.py | 
2 +- tests/unit/test_remote_executor.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) rename src/{ => live_serverless}/remote_executor.py (100%) diff --git a/src/live_serverless/__init__.py b/src/live_serverless/__init__.py index f25ab7a..8b90329 100644 --- a/src/live_serverless/__init__.py +++ b/src/live_serverless/__init__.py @@ -1,7 +1,7 @@ from typing import Dict, Any -from live_serverless.remote_execution import FunctionRequest, FunctionResponse -from remote_executor import RemoteExecutor +from .remote_execution import FunctionRequest, FunctionResponse +from .remote_executor import RemoteExecutor async def handler(event: Dict[str, Any]) -> Dict[str, Any]: diff --git a/src/remote_executor.py b/src/live_serverless/remote_executor.py similarity index 100% rename from src/remote_executor.py rename to src/live_serverless/remote_executor.py diff --git a/tests/conftest.py b/tests/conftest.py index 06e4368..3ab264e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,7 +3,7 @@ import cloudpickle from unittest.mock import MagicMock from live_serverless.remote_execution import FunctionRequest -from remote_executor import RemoteExecutor +from live_serverless.remote_executor import RemoteExecutor @pytest.fixture diff --git a/tests/integration/test_dependency_management.py b/tests/integration/test_dependency_management.py index bef4cfe..2dfab69 100644 --- a/tests/integration/test_dependency_management.py +++ b/tests/integration/test_dependency_management.py @@ -1,6 +1,6 @@ import pytest from unittest.mock import patch, AsyncMock -from remote_executor import RemoteExecutor +from live_serverless.remote_executor import RemoteExecutor from live_serverless.remote_execution import FunctionRequest, FunctionResponse diff --git a/tests/integration/test_handler_integration.py b/tests/integration/test_handler_integration.py index 2d79eb3..e76e8df 100644 --- a/tests/integration/test_handler_integration.py +++ b/tests/integration/test_handler_integration.py @@ -4,7 +4,7 @@ import cloudpickle from pathlib import Path -from remote_executor import RemoteExecutor +from live_serverless.remote_executor import RemoteExecutor from live_serverless.remote_execution import FunctionRequest from live_serverless import handler diff --git a/tests/unit/test_remote_executor.py b/tests/unit/test_remote_executor.py index d6da939..8b1666c 100644 --- a/tests/unit/test_remote_executor.py +++ b/tests/unit/test_remote_executor.py @@ -3,7 +3,7 @@ import cloudpickle from unittest.mock import Mock, patch, AsyncMock -from remote_executor import RemoteExecutor +from live_serverless.remote_executor import RemoteExecutor from live_serverless.remote_execution import FunctionRequest From ed1bc90ee64c8473ec7ae1724c528c489b9d6719 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 20:38:51 -0700 Subject: [PATCH 07/22] refactor: dependency_installer --> live_serverless.dependency_installer --- .../dependency_installer.py | 0 src/live_serverless/remote_executor.py | 2 +- .../integration/test_dependency_management.py | 18 +++++----- tests/unit/test_dependency_installer.py | 34 +++++++++---------- 4 files changed, 27 insertions(+), 27 deletions(-) rename src/{ => live_serverless}/dependency_installer.py (100%) diff --git a/src/dependency_installer.py b/src/live_serverless/dependency_installer.py similarity index 100% rename from src/dependency_installer.py rename to src/live_serverless/dependency_installer.py diff --git a/src/live_serverless/remote_executor.py 
b/src/live_serverless/remote_executor.py index 76896b2..40e66cb 100644 --- a/src/live_serverless/remote_executor.py +++ b/src/live_serverless/remote_executor.py @@ -2,7 +2,7 @@ import asyncio from typing import List, Any from live_serverless.remote_execution import FunctionRequest, FunctionResponse, RemoteExecutorStub -from dependency_installer import DependencyInstaller +from live_serverless.dependency_installer import DependencyInstaller from function_executor import FunctionExecutor from class_executor import ClassExecutor from log_streamer import start_log_streaming, stop_log_streaming, get_streamed_logs diff --git a/tests/integration/test_dependency_management.py b/tests/integration/test_dependency_management.py index 2dfab69..cc7bf50 100644 --- a/tests/integration/test_dependency_management.py +++ b/tests/integration/test_dependency_management.py @@ -12,7 +12,7 @@ def test_install_python_dependencies_integration(self): """Test Python dependency installation with mocked subprocess.""" executor = RemoteExecutor() - with patch("dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: # Mock successful installation mock_subprocess.return_value = FunctionResponse( success=True, stdout="Successfully installed package-1.0.0" @@ -35,7 +35,7 @@ def test_install_system_dependencies_integration(self, mock_platform): mock_platform.return_value = "Linux" executor = RemoteExecutor() - with patch("dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: # Mock successful apt-get update and install mock_subprocess.side_effect = [ FunctionResponse(success=True, stdout="update success"), @@ -121,7 +121,7 @@ def test_dependency_installation_failure_handling(self): """Test proper error handling when dependency installation fails.""" executor = RemoteExecutor() - with patch("dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: # Mock failed installation mock_subprocess.return_value = FunctionResponse( success=False, error="E: Unable to locate package nonexistent-package" @@ -141,7 +141,7 @@ def test_system_dependency_update_failure(self, mock_platform): mock_platform.return_value = "Linux" executor = RemoteExecutor() - with patch("dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: # Mock failed update mock_subprocess.return_value = FunctionResponse( success=False, @@ -217,7 +217,7 @@ def test_dependency_command_construction(self, mock_platform): mock_platform.return_value = "Linux" executor = RemoteExecutor() - with patch("dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: mock_subprocess.return_value = FunctionResponse( success=True, stdout="success" ) @@ -230,7 +230,7 @@ def test_dependency_command_construction(self, mock_platform): # Verify subprocess utility was called mock_subprocess.assert_called() - with patch("dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: # Mock successful update and install processes mock_subprocess.side_effect = [ FunctionResponse(success=True, 
stdout=""), @@ -252,7 +252,7 @@ def test_system_dependency_installation_with_nala_acceleration(self, mock_platfo mock_platform.return_value = "Linux" executor = RemoteExecutor() - with patch("dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: # Mock nala availability check, update, and install mock_subprocess.side_effect = [ FunctionResponse(success=True, stdout="/usr/bin/nala"), @@ -279,7 +279,7 @@ def test_system_dependency_installation_no_nala_available(self, mock_platform): mock_platform.return_value = "Linux" executor = RemoteExecutor() - with patch("dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: # Mock nala not available, then successful apt-get operations mock_subprocess.side_effect = [ FunctionResponse(success=False, error="which: nala: not found"), @@ -305,7 +305,7 @@ def test_exception_handling_in_dependency_installation(self, mock_platform): executor = RemoteExecutor() with patch( - "dependency_installer.run_logged_subprocess", + "live_serverless.dependency_installer.run_logged_subprocess", side_effect=Exception("Subprocess error"), ): # Test Python dependency exception diff --git a/tests/unit/test_dependency_installer.py b/tests/unit/test_dependency_installer.py index fd42c38..4e49f08 100644 --- a/tests/unit/test_dependency_installer.py +++ b/tests/unit/test_dependency_installer.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from dependency_installer import DependencyInstaller +from live_serverless.dependency_installer import DependencyInstaller from live_serverless.remote_execution import FunctionResponse @@ -14,7 +14,7 @@ def setup_method(self): self.installer = DependencyInstaller() @patch("platform.system") - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_install_system_dependencies_success(self, mock_subprocess, mock_platform): """Test successful system dependency installation with small packages (no nala acceleration).""" mock_platform.return_value = "Linux" @@ -33,7 +33,7 @@ def test_install_system_dependencies_success(self, mock_subprocess, mock_platfor assert mock_subprocess.call_count == 2 @patch("platform.system") - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_install_system_dependencies_update_failure( self, mock_subprocess, mock_platform ): @@ -67,7 +67,7 @@ def setup_method(self): """Setup for each test method.""" self.installer = DependencyInstaller() - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_nala_availability_check_available(self, mock_subprocess): """Test nala availability detection when nala is available.""" mock_subprocess.return_value = FunctionResponse( @@ -83,7 +83,7 @@ def test_nala_availability_check_available(self, mock_subprocess): # Should only call subprocess once due to caching assert mock_subprocess.call_count == 1 - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_nala_availability_check_unavailable(self, mock_subprocess): """Test nala availability detection when nala is not available.""" mock_subprocess.return_value = FunctionResponse( @@ -92,7 +92,7 @@ def 
test_nala_availability_check_unavailable(self, mock_subprocess): assert self.installer._check_nala_available() is False - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_nala_availability_check_exception(self, mock_subprocess): """Test nala availability detection when subprocess raises exception.""" mock_subprocess.side_effect = Exception("Command failed") @@ -114,7 +114,7 @@ def test_identify_large_system_packages_empty(self): assert large_packages == [] - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_install_system_with_nala_success(self, mock_subprocess): """Test successful system package installation with nala.""" # Mock successful nala update and install @@ -129,7 +129,7 @@ def test_install_system_with_nala_success(self, mock_subprocess): assert "Installed with nala" in result.stdout assert mock_subprocess.call_count == 2 - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_install_system_with_nala_update_failure_fallback(self, mock_subprocess): """Test nala installation fallback when update fails.""" # Mock failed nala update, then successful apt-get operations for fallback @@ -145,7 +145,7 @@ def test_install_system_with_nala_update_failure_fallback(self, mock_subprocess) assert "Installed with nala" not in result.stdout @patch("platform.system") - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_install_system_dependencies_with_acceleration( self, mock_subprocess, mock_platform ): @@ -166,7 +166,7 @@ def test_install_system_dependencies_with_acceleration( assert result.success is True assert "Installed with nala" in result.stdout - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_install_system_dependencies_without_acceleration(self, mock_subprocess): """Test system dependency installation with acceleration disabled.""" # Mock successful apt-get operations @@ -182,7 +182,7 @@ def test_install_system_dependencies_without_acceleration(self, mock_subprocess) assert result.success is True assert "Installed with nala" not in result.stdout - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_install_system_dependencies_no_large_packages(self, mock_subprocess): """Test system dependency installation when no large packages are present.""" # Mock successful apt-get operations (should fallback to standard) @@ -206,7 +206,7 @@ def setup_method(self): """Setup for each test method.""" self.installer = DependencyInstaller() - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_install_dependencies_success(self, mock_subprocess): """Test successful Python dependency installation.""" mock_subprocess.return_value = FunctionResponse( @@ -220,7 +220,7 @@ def test_install_dependencies_success(self, mock_subprocess): # Verify subprocess utility was called mock_subprocess.assert_called_once() - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_install_dependencies_failure(self, mock_subprocess): """Test Python dependency installation failure.""" 
mock_subprocess.return_value = FunctionResponse( @@ -239,7 +239,7 @@ def test_install_dependencies_empty_list(self): assert result.success is True assert "No packages to install" in result.stdout - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_install_dependencies_with_acceleration_enabled(self, mock_subprocess): """Test Python dependency installation with acceleration enabled (uses UV).""" mock_subprocess.return_value = FunctionResponse( @@ -255,7 +255,7 @@ def test_install_dependencies_with_acceleration_enabled(self, mock_subprocess): # Verify subprocess utility was called mock_subprocess.assert_called_once() - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_install_dependencies_with_acceleration_disabled(self, mock_subprocess): """Test Python dependency installation with acceleration disabled (uses pip).""" mock_subprocess.return_value = FunctionResponse( @@ -271,7 +271,7 @@ def test_install_dependencies_with_acceleration_disabled(self, mock_subprocess): # Verify subprocess utility was called mock_subprocess.assert_called_once() - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_install_dependencies_exception(self, mock_subprocess): """Test Python dependency installation exception handling.""" mock_subprocess.side_effect = Exception("Subprocess error") @@ -281,7 +281,7 @@ def test_install_dependencies_exception(self, mock_subprocess): assert result.success is False assert "Subprocess error" in result.error - @patch("dependency_installer.run_logged_subprocess") + @patch("live_serverless.dependency_installer.run_logged_subprocess") def test_install_dependencies_timeout(self, mock_subprocess): """Test Python dependency installation timeout handling.""" mock_subprocess.return_value = FunctionResponse( From 93848d849a69691e2897604cb77941930c3cf8d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 20:41:18 -0700 Subject: [PATCH 08/22] refactor: cache_sync_manager --> live_serverless.cache_sync_manager --- src/{ => live_serverless}/cache_sync_manager.py | 0 src/live_serverless/remote_executor.py | 2 +- tests/unit/test_cache_sync_manager.py | 6 +++--- 3 files changed, 4 insertions(+), 4 deletions(-) rename src/{ => live_serverless}/cache_sync_manager.py (100%) diff --git a/src/cache_sync_manager.py b/src/live_serverless/cache_sync_manager.py similarity index 100% rename from src/cache_sync_manager.py rename to src/live_serverless/cache_sync_manager.py diff --git a/src/live_serverless/remote_executor.py b/src/live_serverless/remote_executor.py index 40e66cb..4b6ab0f 100644 --- a/src/live_serverless/remote_executor.py +++ b/src/live_serverless/remote_executor.py @@ -6,7 +6,7 @@ from function_executor import FunctionExecutor from class_executor import ClassExecutor from log_streamer import start_log_streaming, stop_log_streaming, get_streamed_logs -from cache_sync_manager import CacheSyncManager +from live_serverless.cache_sync_manager import CacheSyncManager from constants import NAMESPACE diff --git a/tests/unit/test_cache_sync_manager.py b/tests/unit/test_cache_sync_manager.py index 9cbee67..fb81d78 100644 --- a/tests/unit/test_cache_sync_manager.py +++ b/tests/unit/test_cache_sync_manager.py @@ -2,7 +2,7 @@ import pytest from unittest.mock import patch from pathlib import Path -from cache_sync_manager 
import CacheSyncManager +from live_serverless.cache_sync_manager import CacheSyncManager from live_serverless.remote_execution import FunctionResponse @@ -88,7 +88,7 @@ def test_mark_baseline_stores_timestamp(self, cache_sync, mock_env): """Test that mark_baseline stores current timestamp.""" with ( patch.object(cache_sync, "should_sync", return_value=True), - patch("cache_sync_manager.datetime") as mock_datetime, + patch("live_serverless.cache_sync_manager.datetime") as mock_datetime, ): # Mock datetime.now().timestamp() mock_now = mock_datetime.now.return_value @@ -102,7 +102,7 @@ def test_mark_baseline_handles_exception(self, cache_sync, mock_env): """Test that mark_baseline handles exceptions gracefully.""" with ( patch.object(cache_sync, "should_sync", return_value=True), - patch("cache_sync_manager.datetime") as mock_datetime, + patch("live_serverless.cache_sync_manager.datetime") as mock_datetime, ): mock_datetime.now.side_effect = Exception("Time error") From 40c85437af1d9079a94d09daee870b5322312b29 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 20:43:28 -0700 Subject: [PATCH 09/22] refactor: subprocess_utils --> live_serverless.subprocess_utils --- src/live_serverless/cache_sync_manager.py | 2 +- src/live_serverless/dependency_installer.py | 2 +- src/{ => live_serverless}/subprocess_utils.py | 0 3 files changed, 2 insertions(+), 2 deletions(-) rename src/{ => live_serverless}/subprocess_utils.py (100%) diff --git a/src/live_serverless/cache_sync_manager.py b/src/live_serverless/cache_sync_manager.py index 0726730..6e2810b 100644 --- a/src/live_serverless/cache_sync_manager.py +++ b/src/live_serverless/cache_sync_manager.py @@ -6,7 +6,7 @@ from pathlib import Path from typing import Optional from constants import NAMESPACE, CACHE_DIR, VOLUME_CACHE_PATH -from subprocess_utils import run_logged_subprocess +from live_serverless.subprocess_utils import run_logged_subprocess class CacheSyncManager: diff --git a/src/live_serverless/dependency_installer.py b/src/live_serverless/dependency_installer.py index 0017e55..2a72ac6 100644 --- a/src/live_serverless/dependency_installer.py +++ b/src/live_serverless/dependency_installer.py @@ -6,7 +6,7 @@ from live_serverless.remote_execution import FunctionResponse from constants import LARGE_SYSTEM_PACKAGES, NAMESPACE -from subprocess_utils import run_logged_subprocess +from live_serverless.subprocess_utils import run_logged_subprocess class DependencyInstaller: diff --git a/src/subprocess_utils.py b/src/live_serverless/subprocess_utils.py similarity index 100% rename from src/subprocess_utils.py rename to src/live_serverless/subprocess_utils.py From ae0b7eed04b0d8bf42c0c8843e135a0741f0e9be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 20:45:13 -0700 Subject: [PATCH 10/22] refactor: constants --> live_serverless.constants --- src/live_serverless/cache_sync_manager.py | 2 +- src/{ => live_serverless}/constants.py | 0 src/live_serverless/dependency_installer.py | 2 +- src/live_serverless/remote_executor.py | 2 +- 4 files changed, 3 insertions(+), 3 deletions(-) rename src/{ => live_serverless}/constants.py (100%) diff --git a/src/live_serverless/cache_sync_manager.py b/src/live_serverless/cache_sync_manager.py index 6e2810b..e9bc5e6 100644 --- a/src/live_serverless/cache_sync_manager.py +++ b/src/live_serverless/cache_sync_manager.py @@ -5,7 +5,7 @@ from datetime import datetime from pathlib import Path from typing import Optional -from constants import NAMESPACE, 
CACHE_DIR, VOLUME_CACHE_PATH +from live_serverless.constants import NAMESPACE, CACHE_DIR, VOLUME_CACHE_PATH from live_serverless.subprocess_utils import run_logged_subprocess diff --git a/src/constants.py b/src/live_serverless/constants.py similarity index 100% rename from src/constants.py rename to src/live_serverless/constants.py diff --git a/src/live_serverless/dependency_installer.py b/src/live_serverless/dependency_installer.py index 2a72ac6..f04dab1 100644 --- a/src/live_serverless/dependency_installer.py +++ b/src/live_serverless/dependency_installer.py @@ -5,7 +5,7 @@ from typing import List from live_serverless.remote_execution import FunctionResponse -from constants import LARGE_SYSTEM_PACKAGES, NAMESPACE +from live_serverless.constants import LARGE_SYSTEM_PACKAGES, NAMESPACE from live_serverless.subprocess_utils import run_logged_subprocess diff --git a/src/live_serverless/remote_executor.py b/src/live_serverless/remote_executor.py index 4b6ab0f..398f5a0 100644 --- a/src/live_serverless/remote_executor.py +++ b/src/live_serverless/remote_executor.py @@ -7,7 +7,7 @@ from class_executor import ClassExecutor from log_streamer import start_log_streaming, stop_log_streaming, get_streamed_logs from live_serverless.cache_sync_manager import CacheSyncManager -from constants import NAMESPACE +from live_serverless.constants import NAMESPACE class RemoteExecutor(RemoteExecutorStub): From 7fde6d0e53be475da100fefc199e87a70f8f7fab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 20:47:18 -0700 Subject: [PATCH 11/22] refactor: log_streamer --> live_serverless.log_streamer --- src/{ => live_serverless}/log_streamer.py | 0 src/live_serverless/remote_executor.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename src/{ => live_serverless}/log_streamer.py (100%) diff --git a/src/log_streamer.py b/src/live_serverless/log_streamer.py similarity index 100% rename from src/log_streamer.py rename to src/live_serverless/log_streamer.py diff --git a/src/live_serverless/remote_executor.py b/src/live_serverless/remote_executor.py index 398f5a0..fdf1e7b 100644 --- a/src/live_serverless/remote_executor.py +++ b/src/live_serverless/remote_executor.py @@ -5,7 +5,7 @@ from live_serverless.dependency_installer import DependencyInstaller from function_executor import FunctionExecutor from class_executor import ClassExecutor -from log_streamer import start_log_streaming, stop_log_streaming, get_streamed_logs +from live_serverless.log_streamer import start_log_streaming, stop_log_streaming, get_streamed_logs from live_serverless.cache_sync_manager import CacheSyncManager from live_serverless.constants import NAMESPACE From 5e9b01c5aaceddcf5f18349faae3b70e1ae13d6c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 20:48:44 -0700 Subject: [PATCH 12/22] refactor: class_executor --> live_serverless.class_executor --- src/{ => live_serverless}/class_executor.py | 0 src/live_serverless/remote_executor.py | 2 +- tests/unit/test_class_executor.py | 2 +- 3 files changed, 2 insertions(+), 2 deletions(-) rename src/{ => live_serverless}/class_executor.py (100%) diff --git a/src/class_executor.py b/src/live_serverless/class_executor.py similarity index 100% rename from src/class_executor.py rename to src/live_serverless/class_executor.py diff --git a/src/live_serverless/remote_executor.py b/src/live_serverless/remote_executor.py index fdf1e7b..39079b4 100644 --- a/src/live_serverless/remote_executor.py +++ 
b/src/live_serverless/remote_executor.py @@ -4,7 +4,7 @@ from live_serverless.remote_execution import FunctionRequest, FunctionResponse, RemoteExecutorStub from live_serverless.dependency_installer import DependencyInstaller from function_executor import FunctionExecutor -from class_executor import ClassExecutor +from live_serverless.class_executor import ClassExecutor from live_serverless.log_streamer import start_log_streaming, stop_log_streaming, get_streamed_logs from live_serverless.cache_sync_manager import CacheSyncManager from live_serverless.constants import NAMESPACE diff --git a/tests/unit/test_class_executor.py b/tests/unit/test_class_executor.py index a0fb3bd..0e8f4f1 100644 --- a/tests/unit/test_class_executor.py +++ b/tests/unit/test_class_executor.py @@ -4,7 +4,7 @@ import cloudpickle from datetime import datetime -from class_executor import ClassExecutor +from live_serverless.class_executor import ClassExecutor from live_serverless.remote_execution import FunctionRequest From 6c5cbc3cf01a48800ce458a9f5e136cc74e946c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 20:50:18 -0700 Subject: [PATCH 13/22] refactor: function_executor --> live_serverless.function_executor --- src/{ => live_serverless}/function_executor.py | 0 src/live_serverless/remote_executor.py | 2 +- tests/unit/test_function_executor.py | 2 +- 3 files changed, 2 insertions(+), 2 deletions(-) rename src/{ => live_serverless}/function_executor.py (100%) diff --git a/src/function_executor.py b/src/live_serverless/function_executor.py similarity index 100% rename from src/function_executor.py rename to src/live_serverless/function_executor.py diff --git a/src/live_serverless/remote_executor.py b/src/live_serverless/remote_executor.py index 39079b4..0b46b31 100644 --- a/src/live_serverless/remote_executor.py +++ b/src/live_serverless/remote_executor.py @@ -3,7 +3,7 @@ from typing import List, Any from live_serverless.remote_execution import FunctionRequest, FunctionResponse, RemoteExecutorStub from live_serverless.dependency_installer import DependencyInstaller -from function_executor import FunctionExecutor +from live_serverless.function_executor import FunctionExecutor from live_serverless.class_executor import ClassExecutor from live_serverless.log_streamer import start_log_streaming, stop_log_streaming, get_streamed_logs from live_serverless.cache_sync_manager import CacheSyncManager diff --git a/tests/unit/test_function_executor.py b/tests/unit/test_function_executor.py index 072afe2..80f06af 100644 --- a/tests/unit/test_function_executor.py +++ b/tests/unit/test_function_executor.py @@ -3,7 +3,7 @@ import base64 import cloudpickle -from function_executor import FunctionExecutor +from live_serverless.function_executor import FunctionExecutor from live_serverless.remote_execution import FunctionRequest From d37fb14dd2e659e719cf809c9c141561f71e1538 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 20:52:09 -0700 Subject: [PATCH 14/22] refactor: serialization_utils --> live_serverless.serialization_utils --- src/live_serverless/class_executor.py | 2 +- src/live_serverless/function_executor.py | 2 +- src/{ => live_serverless}/serialization_utils.py | 0 tests/unit/test_serialization_utils.py | 2 +- 4 files changed, 3 insertions(+), 3 deletions(-) rename src/{ => live_serverless}/serialization_utils.py (100%) diff --git a/src/live_serverless/class_executor.py b/src/live_serverless/class_executor.py index 08b95aa..2b30037 100644 --- 
a/src/live_serverless/class_executor.py +++ b/src/live_serverless/class_executor.py @@ -7,7 +7,7 @@ from typing import Dict, Any, Tuple from live_serverless.remote_execution import FunctionRequest, FunctionResponse -from serialization_utils import SerializationUtils +from live_serverless.serialization_utils import SerializationUtils class ClassExecutor: diff --git a/src/live_serverless/function_executor.py b/src/live_serverless/function_executor.py index 5901aad..3884583 100644 --- a/src/live_serverless/function_executor.py +++ b/src/live_serverless/function_executor.py @@ -5,7 +5,7 @@ from typing import Dict, Any from live_serverless.remote_execution import FunctionRequest, FunctionResponse -from serialization_utils import SerializationUtils +from live_serverless.serialization_utils import SerializationUtils class FunctionExecutor: diff --git a/src/serialization_utils.py b/src/live_serverless/serialization_utils.py similarity index 100% rename from src/serialization_utils.py rename to src/live_serverless/serialization_utils.py diff --git a/tests/unit/test_serialization_utils.py b/tests/unit/test_serialization_utils.py index d06e9ed..093a005 100644 --- a/tests/unit/test_serialization_utils.py +++ b/tests/unit/test_serialization_utils.py @@ -2,7 +2,7 @@ import base64 import cloudpickle -from serialization_utils import SerializationUtils +from live_serverless.serialization_utils import SerializationUtils class TestSerializationUtils: From edbd96e5f3b2e93fb49147c4e79455e89d5db803 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 21:09:22 -0700 Subject: [PATCH 15/22] refactor: logger --> live_serverless.logger --- src/handler.py | 2 +- src/live_serverless/log_streamer.py | 2 +- src/{ => live_serverless}/logger.py | 0 src/live_serverless/remote_executor.py | 2 +- 4 files changed, 3 insertions(+), 3 deletions(-) rename src/{ => live_serverless}/logger.py (100%) diff --git a/src/handler.py b/src/handler.py index 0b5412a..a025e3b 100644 --- a/src/handler.py +++ b/src/handler.py @@ -1,5 +1,5 @@ import runpod -from logger import setup_logging +from live_serverless.logger import setup_logging from live_serverless import handler diff --git a/src/live_serverless/log_streamer.py b/src/live_serverless/log_streamer.py index 1ec61b8..f12438a 100644 --- a/src/live_serverless/log_streamer.py +++ b/src/live_serverless/log_streamer.py @@ -11,7 +11,7 @@ from collections import deque from typing import Optional, Deque, Callable -from logger import get_log_format +from .logger import get_log_format class LogStreamer: diff --git a/src/logger.py b/src/live_serverless/logger.py similarity index 100% rename from src/logger.py rename to src/live_serverless/logger.py diff --git a/src/live_serverless/remote_executor.py b/src/live_serverless/remote_executor.py index 0b46b31..247e01b 100644 --- a/src/live_serverless/remote_executor.py +++ b/src/live_serverless/remote_executor.py @@ -38,7 +38,7 @@ async def ExecuteFunction(self, request: FunctionRequest) -> FunctionResponse: """ # Start log streaming to capture all system logs # Use the requested log level, not the root logger level - from logger import get_log_level + from .logger import get_log_level requested_level = get_log_level() start_log_streaming(level=requested_level) From 5d021ae83517cac08a138ba8e108e704e4d94e54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 21:12:16 -0700 Subject: [PATCH 16/22] refactor: relative imports from live_serverless module --- src/__init__.py | 1 - 
src/live_serverless/cache_sync_manager.py | 5 +++-- src/live_serverless/class_executor.py | 4 ++-- src/live_serverless/constants.py | 2 +- src/live_serverless/dependency_installer.py | 6 +++--- src/live_serverless/function_executor.py | 4 ++-- src/live_serverless/remote_executor.py | 15 ++++++++------- src/live_serverless/subprocess_utils.py | 2 +- 8 files changed, 20 insertions(+), 19 deletions(-) diff --git a/src/__init__.py b/src/__init__.py index 8ae010c..e69de29 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -1 +0,0 @@ -"""Worker Tetra package.""" diff --git a/src/live_serverless/cache_sync_manager.py b/src/live_serverless/cache_sync_manager.py index e9bc5e6..080c3a3 100644 --- a/src/live_serverless/cache_sync_manager.py +++ b/src/live_serverless/cache_sync_manager.py @@ -5,8 +5,9 @@ from datetime import datetime from pathlib import Path from typing import Optional -from live_serverless.constants import NAMESPACE, CACHE_DIR, VOLUME_CACHE_PATH -from live_serverless.subprocess_utils import run_logged_subprocess + +from .constants import NAMESPACE, CACHE_DIR, VOLUME_CACHE_PATH +from .subprocess_utils import run_logged_subprocess class CacheSyncManager: diff --git a/src/live_serverless/class_executor.py b/src/live_serverless/class_executor.py index 2b30037..9c7fe9e 100644 --- a/src/live_serverless/class_executor.py +++ b/src/live_serverless/class_executor.py @@ -6,8 +6,8 @@ from datetime import datetime from typing import Dict, Any, Tuple -from live_serverless.remote_execution import FunctionRequest, FunctionResponse -from live_serverless.serialization_utils import SerializationUtils +from .remote_execution import FunctionRequest, FunctionResponse +from .serialization_utils import SerializationUtils class ClassExecutor: diff --git a/src/live_serverless/constants.py b/src/live_serverless/constants.py index 858ffa7..3e94662 100644 --- a/src/live_serverless/constants.py +++ b/src/live_serverless/constants.py @@ -1,5 +1,5 @@ # Logger Configuration -NAMESPACE = "tetra" +NAMESPACE = "live_serverless" """Application logger namespace for all components.""" # System Package Acceleration with Nala diff --git a/src/live_serverless/dependency_installer.py b/src/live_serverless/dependency_installer.py index f04dab1..5dee1ca 100644 --- a/src/live_serverless/dependency_installer.py +++ b/src/live_serverless/dependency_installer.py @@ -4,9 +4,9 @@ import platform from typing import List -from live_serverless.remote_execution import FunctionResponse -from live_serverless.constants import LARGE_SYSTEM_PACKAGES, NAMESPACE -from live_serverless.subprocess_utils import run_logged_subprocess +from .remote_execution import FunctionResponse +from .constants import LARGE_SYSTEM_PACKAGES, NAMESPACE +from .subprocess_utils import run_logged_subprocess class DependencyInstaller: diff --git a/src/live_serverless/function_executor.py b/src/live_serverless/function_executor.py index 3884583..f07823c 100644 --- a/src/live_serverless/function_executor.py +++ b/src/live_serverless/function_executor.py @@ -4,8 +4,8 @@ from contextlib import redirect_stdout, redirect_stderr from typing import Dict, Any -from live_serverless.remote_execution import FunctionRequest, FunctionResponse -from live_serverless.serialization_utils import SerializationUtils +from .remote_execution import FunctionRequest, FunctionResponse +from .serialization_utils import SerializationUtils class FunctionExecutor: diff --git a/src/live_serverless/remote_executor.py b/src/live_serverless/remote_executor.py index 247e01b..c1f46b0 100644 --- 
a/src/live_serverless/remote_executor.py +++ b/src/live_serverless/remote_executor.py @@ -1,13 +1,14 @@ import logging import asyncio from typing import List, Any -from live_serverless.remote_execution import FunctionRequest, FunctionResponse, RemoteExecutorStub -from live_serverless.dependency_installer import DependencyInstaller -from live_serverless.function_executor import FunctionExecutor -from live_serverless.class_executor import ClassExecutor -from live_serverless.log_streamer import start_log_streaming, stop_log_streaming, get_streamed_logs -from live_serverless.cache_sync_manager import CacheSyncManager -from live_serverless.constants import NAMESPACE + +from .remote_execution import FunctionRequest, FunctionResponse, RemoteExecutorStub +from .dependency_installer import DependencyInstaller +from .function_executor import FunctionExecutor +from .class_executor import ClassExecutor +from .log_streamer import start_log_streaming, stop_log_streaming, get_streamed_logs +from .cache_sync_manager import CacheSyncManager +from .constants import NAMESPACE class RemoteExecutor(RemoteExecutorStub): diff --git a/src/live_serverless/subprocess_utils.py b/src/live_serverless/subprocess_utils.py index e66ca8e..3789fd1 100644 --- a/src/live_serverless/subprocess_utils.py +++ b/src/live_serverless/subprocess_utils.py @@ -11,7 +11,7 @@ import inspect from typing import List, Optional, Any -from live_serverless.remote_execution import FunctionResponse +from .remote_execution import FunctionResponse def run_logged_subprocess( From 8c0934e35b19e2823b290277ad3570b7484b2a6a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 21:31:12 -0700 Subject: [PATCH 17/22] refactor: setup_logging in live_serverless handler --- src/handler.py | 5 ----- src/live_serverless/__init__.py | 4 ++++ 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/src/handler.py b/src/handler.py index a025e3b..550a302 100644 --- a/src/handler.py +++ b/src/handler.py @@ -1,12 +1,7 @@ import runpod -from live_serverless.logger import setup_logging from live_serverless import handler -# Initialize logging configuration -setup_logging() - - # Start the RunPod serverless handler if __name__ == "__main__": runpod.serverless.start({"handler": handler}) diff --git a/src/live_serverless/__init__.py b/src/live_serverless/__init__.py index 8b90329..f889eae 100644 --- a/src/live_serverless/__init__.py +++ b/src/live_serverless/__init__.py @@ -1,9 +1,13 @@ from typing import Dict, Any +from .logger import setup_logging from .remote_execution import FunctionRequest, FunctionResponse from .remote_executor import RemoteExecutor +setup_logging() + + async def handler(event: Dict[str, Any]) -> Dict[str, Any]: """ RunPod serverless function handler with dependency installation. 
From 5beb799fb1d796fd520a1ced8d33e601e53013b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 22:50:31 -0700 Subject: [PATCH 18/22] chore: ruff format --- src/live_serverless/__init__.py | 1 - .../integration/test_dependency_management.py | 32 ++++++++++++++----- 2 files changed, 24 insertions(+), 9 deletions(-) diff --git a/src/live_serverless/__init__.py b/src/live_serverless/__init__.py index f889eae..f66069e 100644 --- a/src/live_serverless/__init__.py +++ b/src/live_serverless/__init__.py @@ -26,4 +26,3 @@ async def handler(event: Dict[str, Any]) -> Dict[str, Any]: ) return output.model_dump() - diff --git a/tests/integration/test_dependency_management.py b/tests/integration/test_dependency_management.py index cc7bf50..f20e7fc 100644 --- a/tests/integration/test_dependency_management.py +++ b/tests/integration/test_dependency_management.py @@ -12,7 +12,9 @@ def test_install_python_dependencies_integration(self): """Test Python dependency installation with mocked subprocess.""" executor = RemoteExecutor() - with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch( + "live_serverless.dependency_installer.run_logged_subprocess" + ) as mock_subprocess: # Mock successful installation mock_subprocess.return_value = FunctionResponse( success=True, stdout="Successfully installed package-1.0.0" @@ -35,7 +37,9 @@ def test_install_system_dependencies_integration(self, mock_platform): mock_platform.return_value = "Linux" executor = RemoteExecutor() - with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch( + "live_serverless.dependency_installer.run_logged_subprocess" + ) as mock_subprocess: # Mock successful apt-get update and install mock_subprocess.side_effect = [ FunctionResponse(success=True, stdout="update success"), @@ -121,7 +125,9 @@ def test_dependency_installation_failure_handling(self): """Test proper error handling when dependency installation fails.""" executor = RemoteExecutor() - with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch( + "live_serverless.dependency_installer.run_logged_subprocess" + ) as mock_subprocess: # Mock failed installation mock_subprocess.return_value = FunctionResponse( success=False, error="E: Unable to locate package nonexistent-package" @@ -141,7 +147,9 @@ def test_system_dependency_update_failure(self, mock_platform): mock_platform.return_value = "Linux" executor = RemoteExecutor() - with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch( + "live_serverless.dependency_installer.run_logged_subprocess" + ) as mock_subprocess: # Mock failed update mock_subprocess.return_value = FunctionResponse( success=False, @@ -217,7 +225,9 @@ def test_dependency_command_construction(self, mock_platform): mock_platform.return_value = "Linux" executor = RemoteExecutor() - with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch( + "live_serverless.dependency_installer.run_logged_subprocess" + ) as mock_subprocess: mock_subprocess.return_value = FunctionResponse( success=True, stdout="success" ) @@ -230,7 +240,9 @@ def test_dependency_command_construction(self, mock_platform): # Verify subprocess utility was called mock_subprocess.assert_called() - with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch( + 
"live_serverless.dependency_installer.run_logged_subprocess" + ) as mock_subprocess: # Mock successful update and install processes mock_subprocess.side_effect = [ FunctionResponse(success=True, stdout=""), @@ -252,7 +264,9 @@ def test_system_dependency_installation_with_nala_acceleration(self, mock_platfo mock_platform.return_value = "Linux" executor = RemoteExecutor() - with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch( + "live_serverless.dependency_installer.run_logged_subprocess" + ) as mock_subprocess: # Mock nala availability check, update, and install mock_subprocess.side_effect = [ FunctionResponse(success=True, stdout="/usr/bin/nala"), @@ -279,7 +293,9 @@ def test_system_dependency_installation_no_nala_available(self, mock_platform): mock_platform.return_value = "Linux" executor = RemoteExecutor() - with patch("live_serverless.dependency_installer.run_logged_subprocess") as mock_subprocess: + with patch( + "live_serverless.dependency_installer.run_logged_subprocess" + ) as mock_subprocess: # Mock nala not available, then successful apt-get operations mock_subprocess.side_effect = [ FunctionResponse(success=False, error="which: nala: not found"), From 1543c06ffde80bf9934ba441a13dc0c47178d7d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 22:50:55 -0700 Subject: [PATCH 19/22] docs: updated to reflect the live_serverless module --- CLAUDE.md | 60 +++++++++++++--------- docs/Centralized_Log_Streaming_System.md | 14 ++--- docs/System_Python_Runtime_Architecture.md | 23 +++++---- 3 files changed, 55 insertions(+), 42 deletions(-) diff --git a/CLAUDE.md b/CLAUDE.md index bbedca2..b393c61 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -6,18 +6,19 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co This is `worker-tetra`, a RunPod Serverless worker template that provides dynamic GPU provisioning for ML workloads with transparent execution and persistent workspace management. The project consists of two main components: -1. **RunPod Worker Handler** (`src/handler.py`) - A serverless function that executes remote Python functions with dependency management and workspace support -2. **Tetra SDK** (`tetra-rp/` submodule) - Python library for distributed inference and serving of ML models +1. **RunPod Worker Handler** (`src/handler.py`) - A lightweight RunPod wrapper that imports and starts the serverless handler from the `live_serverless` module +2. **Live Serverless Module** (`src/live_serverless/`) - Core implementation that executes remote Python functions with dependency management and workspace support +3. **Tetra SDK** (`tetra-rp/` submodule) - Python library for distributed inference and serving of ML models ## Key Areas of Responsibility -### 1. Remote Function Execution Engine (`src/`) -- **Core Handler** (`src/handler.py:18`): Main RunPod serverless entry point that orchestrates remote execution -- **Remote Executor** (`src/remote_executor.py:11`): Central orchestrator that coordinates all execution components using composition pattern -- **Function Executor** (`src/function_executor.py:12`): Handles individual function execution with full output capture (stdout, stderr, logs) -- **Class Executor** (`src/class_executor.py:14`): Manages class instantiation and method execution with instance persistence and metadata tracking +### 1. 
Remote Function Execution Engine (`src/live_serverless/`) +- **Core Handler** (`src/live_serverless/__init__.py`): Main serverless handler that orchestrates remote execution +- **Remote Executor** (`src/live_serverless/remote_executor.py`): Central orchestrator that coordinates all execution components using composition pattern +- **Function Executor** (`src/live_serverless/function_executor.py`): Handles individual function execution with full output capture (stdout, stderr, logs) +- **Class Executor** (`src/live_serverless/class_executor.py`): Manages class instantiation and method execution with instance persistence and metadata tracking -### 2. Dependency Management System (`src/dependency_installer.py:14`) +### 2. Dependency Management System (`src/live_serverless/dependency_installer.py`) - **Python Package Installation**: UV-based package management with environment-aware configuration (Docker vs local) - **System Package Installation**: APT/Nala-based system dependency handling with acceleration support - **Differential Installation**: Optimized package installation that skips already-installed packages @@ -25,7 +26,7 @@ This is `worker-tetra`, a RunPod Serverless worker template that provides dynami - **System Package Filtering**: Intelligent detection of system-available packages to avoid redundant installation - **Universal Subprocess Integration**: All subprocess operations use centralized logging utility -### 3. Universal Subprocess Utility (`src/subprocess_utils.py`) +### 3. Universal Subprocess Utility (`src/live_serverless/subprocess_utils.py`) - **Centralized Subprocess Operations**: All subprocess calls use `run_logged_subprocess` for consistency - **Automatic Logging Integration**: All subprocess output flows through log streamer at DEBUG level - **Environment-Aware Execution**: Handles Docker vs local environment differences automatically @@ -33,9 +34,8 @@ This is `worker-tetra`, a RunPod Serverless worker template that provides dynami - **Timeout Management**: Configurable timeouts with proper cleanup on timeout/cancellation ### 4. Serialization & Protocol Management -- **Protocol Definitions** (`src/remote_execution.py:13`): Pydantic models for request/response with validation -- **Serialization Utils** (`src/serialization_utils.py`): CloudPickle-based data serialization for function arguments and results -- **Base Executor** (`src/base_executor.py`): Common execution interface and environment setup +- **Protocol Definitions** (`src/live_serverless/remote_execution.py`): Pydantic models for request/response with validation +- **Serialization Utils** (`src/live_serverless/serialization_utils.py`): CloudPickle-based data serialization for function arguments and results ### 5. Tetra SDK Integration (`tetra-rp/` submodule) - **Client Interface**: `@remote` decorator for marking functions for remote execution @@ -58,20 +58,25 @@ This is `worker-tetra`, a RunPod Serverless worker template that provides dynami - **Release Management**: Automated semantic versioning and Docker Hub deployment ### 8. 
Configuration & Constants -- **Constants** (`src/constants.py`): System-wide configuration values (NAMESPACE, LARGE_SYSTEM_PACKAGES) +- **Constants** (`src/live_serverless/constants.py`): System-wide configuration values (NAMESPACE, LARGE_SYSTEM_PACKAGES) - **Environment Configuration**: RunPod API integration ## Architecture ### Core Components -- **`src/handler.py`**: Main RunPod serverless handler implementing composition pattern +- **`src/handler.py`**: Lightweight RunPod wrapper that starts the serverless handler + - Imports and initializes the handler from `live_serverless` module + - Entry point for RunPod Serverless execution + +- **`src/live_serverless/`**: Core serverless implementation module + - `__init__.py`: Main handler implementing composition pattern - Executes arbitrary Python functions remotely with workspace support - Handles dynamic installation of Python and system dependencies with differential updates - Serializes/deserializes function arguments and results using cloudpickle - Captures stdout, stderr, and logs from remote execution -- **`src/remote_execution.py`**: Protocol definitions using Pydantic models +- **`src/live_serverless/remote_execution.py`**: Protocol definitions using Pydantic models - `FunctionRequest`: Defines function execution requests with dependencies - `FunctionResponse`: Standardized response format with success/error handling @@ -188,16 +193,21 @@ gpu_config = LiveServerless( ## File Structure Highlights ``` -├── src/ # Core implementation -│ ├── handler.py # Main serverless function handler -│ ├── remote_executor.py # Central execution orchestrator -│ ├── remote_execution.py # Protocol definitions -│ ├── function_executor.py # Function execution with output capture -│ ├── class_executor.py # Class execution with persistence -│ ├── dependency_installer.py # Python and system dependency management -│ ├── serialization_utils.py # CloudPickle serialization utilities -│ ├── base_executor.py # Common execution interface -│ ├── constants.py # System-wide configuration constants +├── src/ # Source directory +│ ├── handler.py # RunPod wrapper (entry point) +│ ├── live_serverless/ # Core serverless implementation module +│ │ ├── __init__.py # Main handler with composition pattern +│ │ ├── remote_executor.py # Central execution orchestrator +│ │ ├── remote_execution.py # Protocol definitions +│ │ ├── function_executor.py # Function execution with output capture +│ │ ├── class_executor.py # Class execution with persistence +│ │ ├── dependency_installer.py # Python and system dependency management +│ │ ├── subprocess_utils.py # Centralized subprocess operations +│ │ ├── log_streamer.py # Log streaming system +│ │ ├── logger.py # Unified logging configuration +│ │ ├── serialization_utils.py # CloudPickle serialization utilities +│ │ ├── cache_sync_manager.py # Workspace sync management +│ │ └── constants.py # System-wide configuration constants │ └── tests/ # Handler test JSON files ├── tests/ # Comprehensive test suite │ ├── conftest.py # Shared test fixtures diff --git a/docs/Centralized_Log_Streaming_System.md b/docs/Centralized_Log_Streaming_System.md index bcc0918..36d4212 100644 --- a/docs/Centralized_Log_Streaming_System.md +++ b/docs/Centralized_Log_Streaming_System.md @@ -6,7 +6,7 @@ This design implements a comprehensive log streaming architecture that captures ## Key Components -### 1. LogStreamer (`src/log_streamer.py`) +### 1. 
LogStreamer (`src/live_serverless/log_streamer.py`) Thread-safe log capture system that buffers logs and streams them to the response output. ```mermaid @@ -21,7 +21,7 @@ graph TB H[System Operations] --> B ``` -### 2. Centralized Logging (`src/logger.py`) +### 2. Centralized Logging (`src/live_serverless/logger.py`) Unified logging configuration with: - **Debug Format**: `timestamp | level | name | file:line | message` - **Production Format**: `timestamp | level | message` @@ -67,11 +67,11 @@ sequenceDiagram | Component | Purpose | |-----------|---------| -| `src/log_streamer.py` | New centralized log streaming system | -| `src/logger.py` | New unified logging configuration | -| `src/remote_executor.py` | Integration with log streaming | -| `src/dependency_installer.py` | Log capture during installation | -| `src/test_log_streaming.json` | Test case for log visibility | +| `src/live_serverless/log_streamer.py` | New centralized log streaming system | +| `src/live_serverless/logger.py` | New unified logging configuration | +| `src/live_serverless/remote_executor.py` | Integration with log streaming | +| `src/live_serverless/dependency_installer.py` | Log capture during installation | +| `src/tests/test_log_streaming.json` | Test case for log visibility | ## Benefits diff --git a/docs/System_Python_Runtime_Architecture.md b/docs/System_Python_Runtime_Architecture.md index 91ff08a..d66c2af 100644 --- a/docs/System_Python_Runtime_Architecture.md +++ b/docs/System_Python_Runtime_Architecture.md @@ -4,6 +4,8 @@ This design addresses full use of PyTorch installation built into the base Docker image that we use for the runtime. +**Module Structure**: The core implementation resides in the `src/live_serverless/` module. The `src/handler.py` file serves as a lightweight RunPod wrapper that imports and starts the handler from `live_serverless`. + ## Architecture Design ### System Python Runtime @@ -11,16 +13,17 @@ This design addresses full use of PyTorch installation built into the base Docke ```mermaid graph TD A[RunPod Request] --> B[src/handler.py] - B --> C[RemoteExecutor] - C --> D[Environment Detection] - D --> E{Docker?} - E -->|Yes| F[System UV Install] - E -->|No| G[Local UV Install] - F --> H[Function Execution] - G --> H - - J[DependencyInstaller] --> C - K[FunctionExecutor] --> C + B --> C[live_serverless module] + C --> D[RemoteExecutor] + D --> E[Environment Detection] + E --> F{Docker?} + F -->|Yes| G[System UV Install] + F -->|No| H[Local UV Install] + G --> I[Function Execution] + H --> I + + J[DependencyInstaller] --> D + K[FunctionExecutor] --> D ``` ## Key Points From 398d625eec1811095e61a9bff6bd6b8d25fee272 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 15 Oct 2025 23:29:00 -0700 Subject: [PATCH 20/22] feat: add dynamic handler loading via env var `HANDLER_MODULE` Enable container to serve different handlers based on HANDLER_MODULE environment variable, with live_serverless as the default. 
Changes: - Add load_handler() function with dynamic import logic - Support nested module paths (e.g., handlers.custom.module) - Maintain backward compatibility (defaults to live_serverless) - Add comprehensive error handling with clear messages - Include 15 new tests (11 unit, 4 integration) - Test coverage for error scenarios and module switching Usage: HANDLER_MODULE=custom_handler python handler.py --- src/handler.py | 51 ++++- tests/conftest.py | 34 ++++ tests/integration/test_handler_integration.py | 149 ++++++++++++++ tests/unit/test_handler.py | 2 +- tests/unit/test_handler_loader.py | 189 ++++++++++++++++++ 5 files changed, 423 insertions(+), 2 deletions(-) create mode 100644 tests/unit/test_handler_loader.py diff --git a/src/handler.py b/src/handler.py index 550a302..89a7949 100644 --- a/src/handler.py +++ b/src/handler.py @@ -1,7 +1,56 @@ +import os +import importlib +import sys +from typing import Callable, Dict, Any, cast + import runpod -from live_serverless import handler + + +def load_handler() -> Callable[[Dict[str, Any]], Dict[str, Any]]: + """ + Dynamically load the handler function from the specified module. + + Returns: + The handler function from the specified module + + Raises: + ImportError: If the module cannot be imported + AttributeError: If the module doesn't have a 'handler' function + """ + handler_module_name = os.environ.get("HANDLER_MODULE", "live_serverless") + + try: + # Dynamically import the module + handler_module = importlib.import_module(handler_module_name) + + # Get the handler function + if not hasattr(handler_module, "handler"): + raise AttributeError( + f"Module '{handler_module_name}' does not export a 'handler' function" + ) + + handler_func = getattr(handler_module, "handler") + + if not callable(handler_func): + raise TypeError( + f"'handler' in module '{handler_module_name}' is not callable" + ) + + print(f"Loaded handler from module: {handler_module_name}") + return cast(Callable[[Dict[str, Any]], Dict[str, Any]], handler_func) + + except ImportError as e: + print( + f"Error: Failed to import module '{handler_module_name}': {e}", + file=sys.stderr, + ) + raise + except (AttributeError, TypeError) as e: + print(f"Error: {e}", file=sys.stderr) + raise # Start the RunPod serverless handler if __name__ == "__main__": + handler = load_handler() runpod.serverless.start({"handler": handler}) diff --git a/tests/conftest.py b/tests/conftest.py index 3ab264e..4200bad 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,6 +2,8 @@ import base64 import cloudpickle from unittest.mock import MagicMock +from types import ModuleType +from typing import Dict, Any from live_serverless.remote_execution import FunctionRequest from live_serverless.remote_executor import RemoteExecutor @@ -20,6 +22,38 @@ def hello_world(): """ +@pytest.fixture +def mock_handler_module(): + """Mock module with a valid handler function.""" + + async def mock_handler(event: Dict[str, Any]) -> Dict[str, Any]: + """Mock handler function.""" + return { + "success": True, + "result": "mock_result", + "error": None, + } + + module = ModuleType("mock_handler_module") + module.handler = mock_handler + return module + + +@pytest.fixture +def mock_invalid_module(): + """Mock module without a handler attribute.""" + module = ModuleType("mock_invalid_module") + return module + + +@pytest.fixture +def mock_non_callable_handler(): + """Mock module with non-callable handler attribute.""" + module = ModuleType("mock_non_callable_handler") + module.handler = "not_a_function" + return module 
+ + @pytest.fixture def sample_function_with_args(): """Test function that takes arguments.""" diff --git a/tests/integration/test_handler_integration.py b/tests/integration/test_handler_integration.py index e76e8df..09f1159 100644 --- a/tests/integration/test_handler_integration.py +++ b/tests/integration/test_handler_integration.py @@ -463,3 +463,152 @@ def process_data(data): assert decoded_result["sum"] == 15 assert decoded_result["name"] == "test" assert decoded_result["processed"] is True + + +class TestHandlerLoading: + """Integration tests for dynamic handler loading.""" + + @pytest.mark.integration + def test_handler_loading_with_default(self, capsys): + """Test that handler.py loads live_serverless by default.""" + import os + from unittest.mock import patch + import importlib + + # Reload handler module to test loading + with patch.dict(os.environ, {}, clear=True): + import handler + + # Reimport to trigger load_handler() + importlib.reload(handler) + + # Note: This test verifies the module can be imported + # The actual load happens in __main__ + + @pytest.mark.integration + @pytest.mark.asyncio + async def test_handler_execution_with_custom_handler_module( + self, tmp_path, monkeypatch + ): + """Test end-to-end execution with a dynamically loaded custom handler.""" + import sys + + # Create a temporary handler module + custom_handler_code = """ +from typing import Dict, Any + +async def handler(event: Dict[str, Any]) -> Dict[str, Any]: + '''Custom handler for testing.''' + return { + "success": True, + "result": "custom_handler_executed", + "error": None, + "stdout": "Custom handler stdout", + } +""" + + # Write the custom handler to temp directory + custom_module_path = tmp_path / "custom_test_handler.py" + custom_module_path.write_text(custom_handler_code) + + # Add temp directory to Python path + sys.path.insert(0, str(tmp_path)) + + try: + # Set environment variable + monkeypatch.setenv("HANDLER_MODULE", "custom_test_handler") + + # Import and use load_handler + from handler import load_handler + + loaded_handler = load_handler() + + # Execute the handler + event = { + "input": { + "function_name": "test", + "function_code": "def test(): return 'test'", + "args": [], + "kwargs": {}, + } + } + + result = await loaded_handler(event) + + # Verify custom handler was executed + assert result["success"] is True + assert result["result"] == "custom_handler_executed" + assert result["stdout"] == "Custom handler stdout" + + finally: + # Cleanup + sys.path.remove(str(tmp_path)) + if "custom_test_handler" in sys.modules: + del sys.modules["custom_test_handler"] + + @pytest.mark.integration + def test_handler_loading_failure_propagation(self, monkeypatch): + """Test that loading errors propagate correctly.""" + import pytest + from handler import load_handler + + # Set non-existent module + monkeypatch.setenv("HANDLER_MODULE", "totally_nonexistent_module_xyz") + + # Should raise ImportError + with pytest.raises(ImportError) as exc_info: + load_handler() + + # Verify error message contains module name + assert "totally_nonexistent_module_xyz" in str(exc_info.value) + + @pytest.mark.integration + @pytest.mark.asyncio + async def test_handler_switch_between_modules(self, tmp_path, monkeypatch): + """Test switching between different handler modules.""" + import sys + from handler import load_handler + + # Create first custom handler + handler1_code = """ +from typing import Dict, Any + +async def handler(event: Dict[str, Any]) -> Dict[str, Any]: + return {"handler_id": "handler_1", 
"success": True} +""" + handler1_path = tmp_path / "test_handler_1.py" + handler1_path.write_text(handler1_code) + + # Create second custom handler + handler2_code = """ +from typing import Dict, Any + +async def handler(event: Dict[str, Any]) -> Dict[str, Any]: + return {"handler_id": "handler_2", "success": True} +""" + handler2_path = tmp_path / "test_handler_2.py" + handler2_path.write_text(handler2_code) + + sys.path.insert(0, str(tmp_path)) + + try: + # Load first handler + monkeypatch.setenv("HANDLER_MODULE", "test_handler_1") + handler1 = load_handler() + result1 = await handler1({}) + assert result1["handler_id"] == "handler_1" + + # Load second handler + monkeypatch.setenv("HANDLER_MODULE", "test_handler_2") + handler2 = load_handler() + result2 = await handler2({}) + assert result2["handler_id"] == "handler_2" + + # Verify they are different + assert result1["handler_id"] != result2["handler_id"] + + finally: + sys.path.remove(str(tmp_path)) + for mod in ["test_handler_1", "test_handler_2"]: + if mod in sys.modules: + del sys.modules[mod] diff --git a/tests/unit/test_handler.py b/tests/unit/test_handler.py index cc8940c..979dade 100644 --- a/tests/unit/test_handler.py +++ b/tests/unit/test_handler.py @@ -4,7 +4,7 @@ import base64 import cloudpickle from unittest.mock import patch, AsyncMock -from handler import handler +from live_serverless import handler from live_serverless.remote_execution import FunctionResponse diff --git a/tests/unit/test_handler_loader.py b/tests/unit/test_handler_loader.py new file mode 100644 index 0000000..da8889c --- /dev/null +++ b/tests/unit/test_handler_loader.py @@ -0,0 +1,189 @@ +"""Tests for dynamic handler loading in handler.py.""" + +import os +import pytest +from unittest.mock import patch + +from handler import load_handler + + +class TestLoadHandlerDefault: + """Test default handler loading behavior.""" + + def test_load_handler_default(self, capsys): + """Test loading the default live_serverless handler.""" + # Ensure HANDLER_MODULE is not set + with patch.dict(os.environ, {}, clear=True): + handler_func = load_handler() + + # Verify handler is callable + assert callable(handler_func) + + # Verify success message + captured = capsys.readouterr() + assert "Loaded handler from module: live_serverless" in captured.out + + def test_load_handler_returns_correct_type(self): + """Test that load_handler returns a callable.""" + with patch.dict(os.environ, {}, clear=True): + handler_func = load_handler() + + # Type check + assert callable(handler_func) + # Handler should accept dict and return dict + assert handler_func.__code__.co_argcount >= 1 + + +class TestLoadHandlerCustomModule: + """Test loading custom handler modules.""" + + def test_load_handler_custom_module(self, capsys, mock_handler_module): + """Test loading a custom handler module via environment variable.""" + with patch.dict(os.environ, {"HANDLER_MODULE": "custom_handler"}): + with patch( + "handler.importlib.import_module", return_value=mock_handler_module + ): + handler_func = load_handler() + + # Verify the correct handler was loaded + assert callable(handler_func) + assert handler_func == mock_handler_module.handler + + # Verify success message + captured = capsys.readouterr() + assert "Loaded handler from module: custom_handler" in captured.out + + def test_load_handler_nested_module_path(self, capsys, mock_handler_module): + """Test loading handler from nested module path.""" + with patch.dict(os.environ, {"HANDLER_MODULE": "handlers.custom.my_handler"}): + with patch( + 
"handler.importlib.import_module", return_value=mock_handler_module + ): + handler_func = load_handler() + + # Verify handler loaded successfully + assert callable(handler_func) + + # Verify correct module path in message + captured = capsys.readouterr() + assert ( + "Loaded handler from module: handlers.custom.my_handler" + in captured.out + ) + + +class TestLoadHandlerErrorHandling: + """Test error handling in handler loading.""" + + def test_load_handler_module_not_found(self, capsys): + """Test ImportError when module doesn't exist.""" + with patch.dict(os.environ, {"HANDLER_MODULE": "nonexistent_module"}): + with pytest.raises(ImportError): + load_handler() + + # Verify error message to stderr + captured = capsys.readouterr() + assert "Failed to import module 'nonexistent_module'" in captured.err + + def test_load_handler_missing_handler_attribute(self, capsys, mock_invalid_module): + """Test AttributeError when module doesn't have handler function.""" + with patch.dict(os.environ, {"HANDLER_MODULE": "invalid_module"}): + with patch( + "handler.importlib.import_module", return_value=mock_invalid_module + ): + with pytest.raises(AttributeError) as exc_info: + load_handler() + + # Verify error message + assert "does not export a 'handler' function" in str(exc_info.value) + + # Verify error printed to stderr + captured = capsys.readouterr() + assert "does not export a 'handler' function" in captured.err + + def test_load_handler_handler_not_callable(self, capsys, mock_non_callable_handler): + """Test TypeError when handler attribute is not callable.""" + with patch.dict(os.environ, {"HANDLER_MODULE": "non_callable_module"}): + with patch( + "handler.importlib.import_module", + return_value=mock_non_callable_handler, + ): + with pytest.raises(TypeError) as exc_info: + load_handler() + + # Verify error message + assert ( + "'handler' in module 'non_callable_module' is not callable" + in str(exc_info.value) + ) + + # Verify error printed to stderr + captured = capsys.readouterr() + assert "is not callable" in captured.err + + def test_load_handler_import_error_details(self, capsys): + """Test that ImportError details are preserved and reported.""" + error_message = "No module named 'missing_dependency'" + + with patch.dict(os.environ, {"HANDLER_MODULE": "broken_module"}): + with patch( + "handler.importlib.import_module", + side_effect=ImportError(error_message), + ): + with pytest.raises(ImportError) as exc_info: + load_handler() + + # Verify original error is preserved + assert error_message in str(exc_info.value) + + # Verify error details in stderr + captured = capsys.readouterr() + assert "Failed to import module 'broken_module'" in captured.err + assert error_message in captured.err + + +class TestLoadHandlerOutputVerification: + """Test output and logging behavior.""" + + def test_load_handler_prints_success_message(self, capsys, mock_handler_module): + """Test that successful loading prints message to stdout.""" + with patch.dict(os.environ, {"HANDLER_MODULE": "test_module"}): + with patch( + "handler.importlib.import_module", return_value=mock_handler_module + ): + load_handler() + + captured = capsys.readouterr() + assert captured.out.strip() == "Loaded handler from module: test_module" + + def test_load_handler_prints_error_to_stderr(self, capsys): + """Test that errors are printed to stderr, not stdout.""" + with patch.dict(os.environ, {"HANDLER_MODULE": "error_module"}): + with patch( + "handler.importlib.import_module", + side_effect=ImportError("test error"), + ): + with 
pytest.raises(ImportError): + load_handler() + + captured = capsys.readouterr() + # stdout should be empty + assert captured.out == "" + # stderr should contain error + assert "Error: Failed to import module 'error_module'" in captured.err + assert "test error" in captured.err + + def test_load_handler_env_var_precedence(self, capsys, mock_handler_module): + """Test that HANDLER_MODULE env var takes precedence over default.""" + # Set environment variable + with patch.dict(os.environ, {"HANDLER_MODULE": "priority_handler"}): + with patch( + "handler.importlib.import_module", return_value=mock_handler_module + ): + load_handler() + + captured = capsys.readouterr() + # Should NOT load live_serverless + assert "live_serverless" not in captured.out + # Should load priority_handler + assert "priority_handler" in captured.out From 0a9a00cdc6291f8ce0d2045b4d6ffc271f3e06e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Thu, 16 Oct 2025 00:07:18 -0700 Subject: [PATCH 21/22] feat: add dynamic handler loading and event protocol Implement HANDLER_MODULE env var for handler selection and HandlerEvent protocol for standardized event structure across all handlers. - New: handler_protocol.py with HandlerEvent/HandlerFunction - Dynamic import based on HANDLER_MODULE (default: live_serverless) - Backward compatible dict-to-HandlerEvent coercion --- src/handler.py | 7 +- src/handler_protocol.py | 34 +++++ src/live_serverless/__init__.py | 18 ++- tests/unit/test_handler.py | 83 ++++++++++ tests/unit/test_handler_loader.py | 37 +++++ tests/unit/test_handler_protocol.py | 229 ++++++++++++++++++++++++++++ 6 files changed, 402 insertions(+), 6 deletions(-) create mode 100644 src/handler_protocol.py create mode 100644 tests/unit/test_handler_protocol.py diff --git a/src/handler.py b/src/handler.py index 89a7949..017723a 100644 --- a/src/handler.py +++ b/src/handler.py @@ -1,12 +1,13 @@ import os import importlib import sys -from typing import Callable, Dict, Any, cast +from typing import cast import runpod +from handler_protocol import HandlerFunction -def load_handler() -> Callable[[Dict[str, Any]], Dict[str, Any]]: +def load_handler() -> HandlerFunction: """ Dynamically load the handler function from the specified module. @@ -37,7 +38,7 @@ def load_handler() -> Callable[[Dict[str, Any]], Dict[str, Any]]: ) print(f"Loaded handler from module: {handler_module_name}") - return cast(Callable[[Dict[str, Any]], Dict[str, Any]], handler_func) + return cast(HandlerFunction, handler_func) except ImportError as e: print( diff --git a/src/handler_protocol.py b/src/handler_protocol.py new file mode 100644 index 0000000..c26652c --- /dev/null +++ b/src/handler_protocol.py @@ -0,0 +1,34 @@ +""" +Universal handler protocol for RunPod serverless workers. + +All handlers (live_serverless, inference, training, etc.) should follow +this protocol to ensure compatibility with the dynamic handler loader. +""" + +from typing import Dict, Any, Awaitable, Callable +from pydantic import BaseModel, Field + + +class HandlerEvent(BaseModel): + """ + Standard event structure for all RunPod serverless handlers. + + RunPod wraps all inputs in an 'input' field. This model enforces + that structure while allowing each handler to define its own input schema. 
+ + Example: + { + "input": { + "prompt": "Generate text...", + "model": "gpt-4" + } + } + """ + + input: Dict[str, Any] = Field(description="Handler-specific input data") + + model_config = {"extra": "allow"} # Allow RunPod metadata fields + + +# Type alias for standard handler function signature +HandlerFunction = Callable[[HandlerEvent], Awaitable[Dict[str, Any]]] diff --git a/src/live_serverless/__init__.py b/src/live_serverless/__init__.py index f66069e..2136808 100644 --- a/src/live_serverless/__init__.py +++ b/src/live_serverless/__init__.py @@ -1,5 +1,6 @@ -from typing import Dict, Any +from typing import Dict, Any, Union +from handler_protocol import HandlerEvent from .logger import setup_logging from .remote_execution import FunctionRequest, FunctionResponse from .remote_executor import RemoteExecutor @@ -8,15 +9,26 @@ setup_logging() -async def handler(event: Dict[str, Any]) -> Dict[str, Any]: +async def handler(event: Union[HandlerEvent, Dict[str, Any]]) -> Dict[str, Any]: """ RunPod serverless function handler with dependency installation. + + Args: + event: Handler event containing input data. Can be HandlerEvent or dict + for backward compatibility. + + Returns: + Dictionary containing execution results """ output: FunctionResponse try: + # Coerce dict to HandlerEvent if needed (backward compatibility) + if isinstance(event, dict): + event = HandlerEvent(**event) + executor = RemoteExecutor() - input_data = FunctionRequest(**event.get("input", {})) + input_data = FunctionRequest(**event.input) output = await executor.ExecuteFunction(input_data) except Exception as error: diff --git a/tests/unit/test_handler.py b/tests/unit/test_handler.py index 979dade..6980cb9 100644 --- a/tests/unit/test_handler.py +++ b/tests/unit/test_handler.py @@ -6,6 +6,7 @@ from unittest.mock import patch, AsyncMock from live_serverless import handler from live_serverless.remote_execution import FunctionResponse +from handler_protocol import HandlerEvent class TestHandler: @@ -147,3 +148,85 @@ async def test_handler_class_execution(self): assert result["success"] is True assert "instance_id" in result assert "instance_info" in result + + +class TestHandlerWithHandlerEvent: + """Test handler with HandlerEvent model.""" + + @pytest.mark.asyncio + async def test_handler_with_handler_event_object(self): + """Test handler accepts HandlerEvent objects.""" + event = HandlerEvent( + input={ + "function_name": "test_func", + "function_code": "def test_func(): return 'success'", + "args": [], + "kwargs": {}, + } + ) + + with patch("live_serverless.RemoteExecutor") as mock_executor_class: + mock_executor = AsyncMock() + mock_executor_class.return_value = mock_executor + mock_executor.ExecuteFunction.return_value = FunctionResponse( + success=True, + result=base64.b64encode(cloudpickle.dumps("success")).decode("utf-8"), + stdout="Function executed successfully", + ) + + result = await handler(event) + + assert result["success"] is True + assert "result" in result + + @pytest.mark.asyncio + async def test_handler_backward_compatible_with_dict(self): + """Test handler still works with plain dict (backward compatibility).""" + # Plain dict should be coerced to HandlerEvent + event = { + "input": { + "function_name": "test_func", + "function_code": "def test_func(): return 'test'", + "args": [], + "kwargs": {}, + } + } + + with patch("live_serverless.RemoteExecutor") as mock_executor_class: + mock_executor = AsyncMock() + mock_executor_class.return_value = mock_executor + mock_executor.ExecuteFunction.return_value = 
FunctionResponse( + success=True, + result=base64.b64encode(cloudpickle.dumps("test")).decode("utf-8"), + ) + + result = await handler(event) + + assert result["success"] is True + + @pytest.mark.asyncio + async def test_handler_event_with_extra_fields(self): + """Test handler works with HandlerEvent containing extra fields.""" + event = HandlerEvent( + input={ + "function_name": "test_func", + "function_code": "def test_func(): return 'test'", + "args": [], + "kwargs": {}, + }, + job_id="job-123", + worker_id="worker-456", + ) + + with patch("live_serverless.RemoteExecutor") as mock_executor_class: + mock_executor = AsyncMock() + mock_executor_class.return_value = mock_executor + mock_executor.ExecuteFunction.return_value = FunctionResponse( + success=True, + result=base64.b64encode(cloudpickle.dumps("test")).decode("utf-8"), + ) + + result = await handler(event) + + assert result["success"] is True + # Extra fields shouldn't interfere with handler execution diff --git a/tests/unit/test_handler_loader.py b/tests/unit/test_handler_loader.py index da8889c..fd5c11d 100644 --- a/tests/unit/test_handler_loader.py +++ b/tests/unit/test_handler_loader.py @@ -187,3 +187,40 @@ def test_load_handler_env_var_precedence(self, capsys, mock_handler_module): assert "live_serverless" not in captured.out # Should load priority_handler assert "priority_handler" in captured.out + + +class TestHandlerFunctionType: + """Test HandlerFunction type compatibility.""" + + def test_load_handler_returns_handler_function_type(self): + """Test that load_handler returns a HandlerFunction compatible callable.""" + with patch.dict(os.environ, {}, clear=True): + handler_func = load_handler() + + # Verify it's callable + assert callable(handler_func) + + # Verify it's an async function (coroutine function) + import inspect + + assert inspect.iscoroutinefunction(handler_func) + + def test_loaded_handler_signature_matches_protocol(self, mock_handler_module): + """Test that loaded handler has correct signature for HandlerFunction.""" + with patch.dict(os.environ, {"HANDLER_MODULE": "test_module"}): + with patch( + "handler.importlib.import_module", return_value=mock_handler_module + ): + handler_func = load_handler() + + # Check signature + import inspect + + sig = inspect.signature(handler_func) + params = list(sig.parameters.keys()) + + # Should have at least one parameter (event) + assert len(params) >= 1 + + # Should be async + assert inspect.iscoroutinefunction(handler_func) diff --git a/tests/unit/test_handler_protocol.py b/tests/unit/test_handler_protocol.py new file mode 100644 index 0000000..f2f4a62 --- /dev/null +++ b/tests/unit/test_handler_protocol.py @@ -0,0 +1,229 @@ +"""Tests for the universal handler protocol.""" + +import pytest +from pydantic import ValidationError + + +class TestHandlerEvent: + """Test HandlerEvent model validation and behavior.""" + + def test_handler_event_with_valid_input(self): + """Test creating HandlerEvent with valid input.""" + from handler_protocol import HandlerEvent + + event_data = {"input": {"function_name": "test", "args": []}} + event = HandlerEvent(**event_data) + + assert event.input == {"function_name": "test", "args": []} + assert isinstance(event.input, dict) + + def test_handler_event_with_complex_input(self): + """Test HandlerEvent with complex nested input.""" + from handler_protocol import HandlerEvent + + event_data = { + "input": { + "function_name": "process", + "args": [1, 2, 3], + "kwargs": {"key": "value"}, + "nested": {"deep": {"data": "test"}}, + } + } + 
event = HandlerEvent(**event_data) + + assert event.input["function_name"] == "process" + assert event.input["nested"]["deep"]["data"] == "test" + + def test_handler_event_missing_input_field(self): + """Test that HandlerEvent requires input field.""" + from handler_protocol import HandlerEvent + + event_data = {"wrong_field": "data"} + + with pytest.raises(ValidationError) as exc_info: + HandlerEvent(**event_data) + + assert "input" in str(exc_info.value) + + def test_handler_event_with_empty_input(self): + """Test HandlerEvent with empty input dict.""" + from handler_protocol import HandlerEvent + + event_data = {"input": {}} + event = HandlerEvent(**event_data) + + assert event.input == {} + + def test_handler_event_with_extra_fields(self): + """Test that HandlerEvent allows extra fields (RunPod metadata).""" + from handler_protocol import HandlerEvent + + event_data = { + "input": {"data": "test"}, + "id": "job-123", + "job_id": "runpod-job-456", + } + event = HandlerEvent(**event_data) + + assert event.input == {"data": "test"} + # Extra fields should be accessible + assert hasattr(event, "id") + assert event.id == "job-123" + + def test_handler_event_serialization(self): + """Test HandlerEvent can be serialized to dict.""" + from handler_protocol import HandlerEvent + + event_data = {"input": {"key": "value"}, "job_id": "123"} + event = HandlerEvent(**event_data) + + serialized = event.model_dump() + assert serialized["input"] == {"key": "value"} + assert serialized["job_id"] == "123" + + def test_handler_event_deserialization(self): + """Test HandlerEvent can be created from dict.""" + from handler_protocol import HandlerEvent + + event_dict = {"input": {"function": "test"}} + event = HandlerEvent.model_validate(event_dict) + + assert event.input["function"] == "test" + + def test_handler_event_json_serialization(self): + """Test HandlerEvent can be serialized to/from JSON.""" + from handler_protocol import HandlerEvent + import json + + event_data = {"input": {"data": [1, 2, 3]}} + event = HandlerEvent(**event_data) + + # Serialize to JSON + json_str = event.model_dump_json() + parsed = json.loads(json_str) + + assert parsed["input"]["data"] == [1, 2, 3] + + # Deserialize from JSON + event2 = HandlerEvent.model_validate_json(json_str) + assert event2.input == event.input + + def test_handler_event_dict_coercion(self): + """Test that plain dict can be coerced to HandlerEvent (backward compat).""" + from handler_protocol import HandlerEvent + + # This simulates what happens when a function expecting HandlerEvent + # receives a plain dict (Pydantic will coerce it) + plain_dict = {"input": {"test": "data"}} + + # Direct instantiation works + event = HandlerEvent(**plain_dict) + assert event.input == {"test": "data"} + + def test_handler_event_input_not_dict(self): + """Test that input must be a dict.""" + from handler_protocol import HandlerEvent + + event_data = {"input": "not a dict"} + + with pytest.raises(ValidationError) as exc_info: + HandlerEvent(**event_data) + + assert "input" in str(exc_info.value) + + +class TestHandlerFunctionType: + """Test HandlerFunction type alias.""" + + def test_handler_function_type_exists(self): + """Test that HandlerFunction type alias is defined.""" + from handler_protocol import HandlerFunction + from typing import get_origin + + # Verify it's a Callable type + assert get_origin(HandlerFunction) is not None + + def test_handler_function_signature_validation(self): + """Test that handler functions can be type-checked against HandlerFunction.""" + 
from handler_protocol import HandlerEvent + from typing import Dict, Any + + # Define a valid handler function + async def valid_handler(event: HandlerEvent) -> Dict[str, Any]: + return {"success": True} + + # This is a compile-time check, but we can verify the signature exists + assert callable(valid_handler) + assert valid_handler.__annotations__["event"] == HandlerEvent + assert valid_handler.__annotations__["return"] == Dict[str, Any] + + +class TestHandlerEventUsagePatterns: + """Test common usage patterns with HandlerEvent.""" + + def test_handler_event_with_live_serverless_input(self): + """Test HandlerEvent with live_serverless-style input.""" + from handler_protocol import HandlerEvent + + event_data = { + "input": { + "function_name": "hello_world", + "function_code": "def hello_world(): return 'hello'", + "args": [], + "kwargs": {}, + } + } + event = HandlerEvent(**event_data) + + assert event.input["function_name"] == "hello_world" + assert event.input["function_code"] is not None + + def test_handler_event_with_inference_input(self): + """Test HandlerEvent with inference-style input.""" + from handler_protocol import HandlerEvent + + event_data = { + "input": { + "prompt": "Generate a story", + "model": "gpt-4", + "temperature": 0.7, + "max_tokens": 100, + } + } + event = HandlerEvent(**event_data) + + assert event.input["prompt"] == "Generate a story" + assert event.input["model"] == "gpt-4" + assert event.input["temperature"] == 0.7 + + def test_handler_event_with_training_input(self): + """Test HandlerEvent with training-style input.""" + from handler_protocol import HandlerEvent + + event_data = { + "input": { + "dataset_uri": "s3://bucket/dataset.csv", + "model_config": {"learning_rate": 0.001, "epochs": 10}, + "output_uri": "s3://bucket/model/", + } + } + event = HandlerEvent(**event_data) + + assert event.input["dataset_uri"].startswith("s3://") + assert event.input["model_config"]["epochs"] == 10 + + def test_handler_event_access_patterns(self): + """Test different ways to access HandlerEvent data.""" + from handler_protocol import HandlerEvent + + event_data = {"input": {"key1": "value1", "key2": "value2"}} + event = HandlerEvent(**event_data) + + # Dict-style access on input + assert event.input["key1"] == "value1" + assert event.input.get("key2") == "value2" + assert event.input.get("missing", "default") == "default" + + # Iteration + assert list(event.input.keys()) == ["key1", "key2"] + assert "key1" in event.input From cdcc940d03eeb8c5d393ea303b5926a8363c0d72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dean=20Qui=C3=B1anola?= Date: Wed, 12 Nov 2025 11:24:02 -0800 Subject: [PATCH 22/22] docs: Dynamic Handler Loading architecture --- docs/Dynamic_Handler_Loading.md | 450 ++++++++++++++++++++++++++++++++ 1 file changed, 450 insertions(+) create mode 100644 docs/Dynamic_Handler_Loading.md diff --git a/docs/Dynamic_Handler_Loading.md b/docs/Dynamic_Handler_Loading.md new file mode 100644 index 0000000..bc73b5a --- /dev/null +++ b/docs/Dynamic_Handler_Loading.md @@ -0,0 +1,450 @@ +# Dynamic Handler Loading + +This document explains the dynamic handler loading system that allows a single worker container to support multiple handler implementations through runtime configuration. 
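+As a quick orientation before the details below, the core idea is a single dynamic import in the wrapper. The snippet here is an illustrative sketch only; the actual implementation, with validation and error handling, lives in [src/handler.py](../src/handler.py):
+
+```python
+import importlib
+import os
+
+# HANDLER_MODULE selects the handler implementation; live_serverless is the default.
+module_name = os.environ.get("HANDLER_MODULE", "live_serverless")
+handler = getattr(importlib.import_module(module_name), "handler")
+```
+
+The same container image can therefore serve a different workload simply by changing this environment variable at deploy time.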
+ +## Overview + +The dynamic handler loading feature enables you to: + +- Deploy multiple handler types using the same Docker image +- Switch handler implementations via environment variable +- Create custom handlers following a standard protocol +- Test handlers locally before deployment +- Maintain backward compatibility with existing handlers + +## Architecture + +```mermaid +graph TB + Start([Container Start]) --> LoadEnv[Read HANDLER_MODULE env var] + LoadEnv --> CheckEnv{Env var set?} + CheckEnv -->|No| DefaultModule[Use 'live_serverless'] + CheckEnv -->|Yes| CustomModule[Use specified module] + DefaultModule --> Import[Import module via importlib] + CustomModule --> Import + Import --> Validate{Module has
'handler' function?} + Validate -->|No| Error([Raise AttributeError]) + Validate -->|Yes| CheckCallable{Handler is
callable?} + CheckCallable -->|No| Error + CheckCallable -->|Yes| TypeCast[Cast to HandlerFunction type] + TypeCast --> Register[Register with RunPod SDK] + Register --> Ready([Ready to process jobs]) + + style Start fill:#1976d2,stroke:#0d47a1,stroke-width:3px,color:#fff + style Ready fill:#388e3c,stroke:#1b5e20,stroke-width:3px,color:#fff + style Error fill:#d32f2f,stroke:#b71c1c,stroke-width:3px,color:#fff + style Import fill:#0288d1,stroke:#01579b,stroke-width:2px,color:#fff + style Register fill:#0288d1,stroke:#01579b,stroke-width:2px,color:#fff + style DefaultModule fill:#f57c00,stroke:#e65100,stroke-width:2px,color:#fff + style CustomModule fill:#f57c00,stroke:#e65100,stroke-width:2px,color:#fff +``` + +## How It Works + +### Loading Flow + +1. **Environment Check**: [src/handler.py:21](../src/handler.py#L21) reads `HANDLER_MODULE` environment variable +2. **Default Fallback**: If not set, defaults to `live_serverless` module +3. **Dynamic Import**: Uses `importlib.import_module()` to load the specified module +4. **Validation**: Ensures the module exports a callable `handler` function +5. **Registration**: Passes the handler to `runpod.serverless.start()` + +### Handler Protocol + +All handlers must follow this protocol (defined in [src/handler_protocol.py](../src/handler_protocol.py)): + +**Function Signature**: +```python +async def handler(event: HandlerEvent) -> Dict[str, Any]: + """Process a job and return results.""" + pass +``` + +**Requirements**: +- Must be an async function (coroutine) +- Must accept `HandlerEvent` or `Dict[str, Any]` +- Must return `Dict[str, Any]` +- Must be exported as module-level `handler` function + +## Configuration + +### Environment Variable + +**`HANDLER_MODULE`**: Specifies which Python module contains the handler function. 
+ +**Format**: Python module path (dot-separated) + +**Examples**: +- `HANDLER_MODULE=live_serverless` (default) +- `HANDLER_MODULE=custom_handler` +- `HANDLER_MODULE=handlers.inference.llm_handler` +- `HANDLER_MODULE=my_package.workers.batch_processor` + +### Setting the Handler + +**Local Development**: +```bash +# Use default handler +python src/handler.py + +# Use custom handler +HANDLER_MODULE=custom_handler python src/handler.py + +# Use nested module +HANDLER_MODULE=handlers.inference.llm_handler python src/handler.py +``` + +**Docker Build Time**: +```bash +# Not currently supported - see TODO.md task #2 +# Will be supported in future release via ARG +``` + +**Docker Runtime**: +```bash +# Override at container startup +docker run -e HANDLER_MODULE=custom_handler runpod/tetra-rp:latest + +# With RunPod +# Set environment variable in endpoint configuration +``` + +**Testing**: +```bash +# Test with custom handler +HANDLER_MODULE=custom_handler make test-handler + +# Test specific JSON file +HANDLER_MODULE=custom_handler python src/handler.py --test_input "$(cat src/tests/test_input.json)" +``` + +## Creating Custom Handlers + +### Minimal Handler + +```python +# my_handler.py +from typing import Dict, Any +from handler_protocol import HandlerEvent + +async def handler(event: HandlerEvent) -> Dict[str, Any]: + """Minimal handler that echoes input.""" + return { + "success": True, + "result": event.input, + "error": None + } +``` + +### Handler with Error Handling + +```python +# robust_handler.py +from typing import Dict, Any +import traceback +from handler_protocol import HandlerEvent + +async def handler(event: HandlerEvent) -> Dict[str, Any]: + """Handler with comprehensive error handling.""" + try: + # Extract input + user_input = event.input.get("data") + + # Validate + if not user_input: + return { + "success": False, + "error": "Missing 'data' field in input", + "result": None + } + + # Process + result = process_data(user_input) + + # Return success + return { + "success": True, + "result": result, + "error": None + } + + except Exception as e: + return { + "success": False, + "error": f"{type(e).__name__}: {str(e)}", + "traceback": traceback.format_exc(), + "result": None + } + +def process_data(data): + """Business logic implementation.""" + return {"processed": data} +``` + +### Handler with Backward Compatibility + +```python +# compatible_handler.py +from typing import Dict, Any, Union +from handler_protocol import HandlerEvent + +async def handler(event: Union[HandlerEvent, Dict[str, Any]]) -> Dict[str, Any]: + """Handler supporting both HandlerEvent and raw dict input.""" + # Coerce dict to HandlerEvent if needed + if isinstance(event, dict): + event = HandlerEvent(**event) + + # Process normally + return { + "success": True, + "result": event.input.get("value"), + "error": None + } +``` + +## Testing Custom Handlers + +### Unit Testing + +```python +# test_my_handler.py +import pytest +from my_handler import handler +from handler_protocol import HandlerEvent + +@pytest.mark.asyncio +async def test_handler_success(): + """Test successful handler execution.""" + event = HandlerEvent(input={"data": "test"}) + result = await handler(event) + + assert result["success"] is True + assert result["error"] is None + assert result["result"] is not None + +@pytest.mark.asyncio +async def test_handler_error(): + """Test handler error handling.""" + event = HandlerEvent(input={}) # Missing required data + result = await handler(event) + + assert result["success"] is False + assert 
result["error"] is not None +``` + +### Integration Testing + +```bash +# Create test input file +cat > test_my_handler.json << EOF +{ + "input": { + "data": "integration test" + } +} +EOF + +# Test handler with file +HANDLER_MODULE=my_handler python src/handler.py --test_input "$(cat test_my_handler.json)" +``` + +### Using Test Script + +```bash +# Place test JSON files in src/tests/ +cp test_my_handler.json src/tests/ + +# Run all tests with custom handler +HANDLER_MODULE=my_handler ./src/test-handler.sh +``` + +## Deployment + +### Step 1: Prepare Handler Module + +```bash +# Add your handler to src/ directory +cp my_handler.py src/ + +# Or create handlers package +mkdir -p src/handlers +cp my_handler.py src/handlers/ +``` + +### Step 2: Test Locally + +```bash +# Test with your handler +HANDLER_MODULE=my_handler make test-handler + +# Or if using package structure +HANDLER_MODULE=handlers.my_handler make test-handler +``` + +### Step 3: Build Docker Image + +```bash +# Build with your handler code included +make build + +# Image will contain all handlers in src/ +``` + +### Step 4: Deploy to RunPod + +**Option A: Environment Variable (Recommended)** +1. Create endpoint in RunPod dashboard +2. Set environment variable: `HANDLER_MODULE=my_handler` +3. Deploy using `runpod/tetra-rp:latest` image + +**Option B: Custom Image** +1. Push custom image with your handler +2. Configure endpoint to use your image +3. Set `HANDLER_MODULE` if handler is not named `live_serverless` + +### Step 5: Verify Deployment + +```bash +# Test deployed endpoint +runpod run --endpoint-id YOUR_ENDPOINT_ID --input '{"data": "test"}' +``` + +## Handler Examples + +See [docs/examples/](./examples/) for complete working examples: + +- [simple_echo_handler.py](./examples/simple_echo_handler.py) - Minimal implementation +- [inference_handler.py](./examples/inference_handler.py) - LLM inference pattern +- [batch_processing_handler.py](./examples/batch_processing_handler.py) - Batch data processing + +## Troubleshooting + +### Module Not Found + +**Error**: `ImportError: No module named 'my_handler'` + +**Solutions**: +- Ensure handler file is in `src/` directory or Python path +- Check module name matches file name (without `.py`) +- For packages, ensure `__init__.py` exists +- Verify module path uses dots not slashes: `handlers.my_handler` not `handlers/my_handler` + +### Handler Not Found + +**Error**: `AttributeError: Module 'my_handler' does not export a 'handler' function` + +**Solutions**: +- Ensure function is named exactly `handler` (lowercase) +- Check function is defined at module level (not inside class or another function) +- Verify function is not commented out or conditionally defined + +### Handler Not Callable + +**Error**: `TypeError: 'handler' in module 'my_handler' is not callable` + +**Solutions**: +- Ensure `handler` is a function, not a variable or constant +- Check you're not assigning to `handler` after defining it +- Verify function definition is complete (no syntax errors) + +### Handler Type Errors + +**Error**: Handler executes but returns wrong type + +**Solutions**: +- Ensure handler returns `Dict[str, Any]`, not other types +- Use type hints to catch issues: `-> Dict[str, Any]` +- Check return statement returns dictionary with `success`, `result`, `error` keys + +### Import Errors in Handler + +**Error**: Handler imports fail when loaded + +**Solutions**: +- Install dependencies in Docker image via `pyproject.toml` +- For dynamic dependencies, use `live_serverless` handler's dependency 
installation +- Check Python path includes handler's package location + +## Best Practices + +### Handler Design + +1. **Single Responsibility**: Each handler should have one clear purpose +2. **Error Handling**: Always catch and return exceptions, never let them propagate +3. **Input Validation**: Validate all inputs before processing +4. **Consistent Response**: Always return dict with `success`, `result`, `error` keys +5. **Logging**: Use structured logging, not print statements (unless in `live_serverless`) +6. **Async/Await**: Use async properly, don't block the event loop + +### Code Organization + +1. **Module Structure**: Use packages for complex handlers (`handlers/inference/`) +2. **Separation**: Keep handler logic separate from business logic +3. **Reusability**: Extract common code to shared modules +4. **Testing**: Write unit tests for handler and integration tests for full flow + +### Performance + +1. **Initialization**: Load models/resources outside handler function if possible +2. **Caching**: Cache expensive operations when safe +3. **Cleanup**: Properly close connections and release resources +4. **Timeout**: Handle long-running operations with timeouts + +### Security + +1. **Input Sanitization**: Never trust user input +2. **Secrets**: Use environment variables, not hardcoded values +3. **Validation**: Validate types and ranges for all inputs +4. **Sandboxing**: Be aware handlers execute arbitrary code in `live_serverless` + +## Migration Guide + +### From Legacy Handler + +**Before** (old `handler.py` approach): +```python +# handler.py +def handler(event): + result = process(event["input"]) + return {"output": result} + +if __name__ == "__main__": + import runpod + runpod.serverless.start({"handler": handler}) +``` + +**After** (using dynamic loading): +```python +# my_legacy_handler.py +async def handler(event): + # Make async + result = await process_async(event["input"]) + return {"output": result} + +# No runpod.serverless.start needed +``` + +**Usage**: +```bash +HANDLER_MODULE=my_legacy_handler python src/handler.py +``` + +### Gradual Migration + +1. Create new handler file alongside existing code +2. Test with `HANDLER_MODULE` locally +3. Deploy with environment variable override +4. Validate in production +5. Update default or remove old handler + +## Technical Reference + +For detailed protocol specifications, see [Handler_Protocol_Reference.md](./Handler_Protocol_Reference.md). + +## Related Documentation + +- [Handler Protocol Reference](./Handler_Protocol_Reference.md) - Protocol specifications +- [CLAUDE.md](../CLAUDE.md) - Project architecture and development guide +- [TODO.md](../TODO.md) - Future enhancements and roadmap +- [src/handler.py](../src/handler.py#L10-L57) - Implementation details +- [tests/unit/test_handler_loader.py](../tests/unit/test_handler_loader.py) - Test examples