From 96dd24ee4cafd488101e8968a5cd924cc7c568b2 Mon Sep 17 00:00:00 2001 From: Alexander-Dubrawski Date: Thu, 3 Sep 2020 09:25:48 +0200 Subject: [PATCH 1/7] Add first draft --- .../continuous_job_handler.py | 152 ++++++++++++++++ .../test_continous_job_handler.py | 169 ++++++++++++++++++ 2 files changed, 321 insertions(+) create mode 100644 hyrisecockpit/database_manager/continuous_job_handler.py create mode 100644 tests/database_manager/test_continous_job_handler.py diff --git a/hyrisecockpit/database_manager/continuous_job_handler.py b/hyrisecockpit/database_manager/continuous_job_handler.py new file mode 100644 index 000000000..18e3fe73f --- /dev/null +++ b/hyrisecockpit/database_manager/continuous_job_handler.py @@ -0,0 +1,152 @@ +"""This module handles the continuous jobs.""" + +from multiprocessing import Value + +from apscheduler.schedulers.background import BackgroundScheduler + +from .cursor import ConnectionFactory, StorageConnectionFactory +from .job.ping_hyrise import ping_hyrise +from .job.update_chunks_data import update_chunks_data +from .job.update_plugin_log import update_plugin_log +from .job.update_queue_length import update_queue_length +from .job.update_segment_configuration import update_segment_configuration +from .job.update_storage_data import update_storage_data +from .job.update_system_data import update_system_data +from .job.update_workload_operator_information import ( + update_workload_operator_information, +) +from .job.update_workload_statement_information import ( + update_workload_statement_information, +) +from .worker_pool import WorkerPool + + +class ContinuousJobHandler: + """Continuous Job Handler.""" + + def __init__( + self, + connection_factory: ConnectionFactory, + hyrise_active: Value, + worker_pool: WorkerPool, + storage_connection_factory: StorageConnectionFactory, + database_blocked: Value, + ): + """Initialize continuous Job Handler.""" + self._connection_factory = connection_factory + self._hyrise_active = 
hyrise_active + self._worker_pool = worker_pool + self._storage_connection_factory = storage_connection_factory + self._database_blocked: Value = database_blocked + self._previous_system_data = { + "previous_system_usage": None, + "previous_process_usage": None, + } + self._previous_chunk_data = { + "value": None, + } + self._scheduler: BackgroundScheduler = BackgroundScheduler() + self._init_jobs() + + def _init_jobs(self) -> None: + """Initialize basic background jobs.""" + self._ping_hyrise_job = self._scheduler.add_job( + func=ping_hyrise, + trigger="interval", + seconds=0.5, + args=(self._connection_factory, self._hyrise_active), + ) + self._update_queue_length_job = self._scheduler.add_job( + func=update_queue_length, + trigger="interval", + seconds=1, + args=(self._worker_pool, self._storage_connection_factory), + ) + self._update_system_data_job = self._scheduler.add_job( + func=update_system_data, + trigger="interval", + seconds=1, + args=( + self._database_blocked, + self._connection_factory, + self._storage_connection_factory, + self._previous_system_data, + ), + ) + self._update_storage_data_job = self._scheduler.add_job( + func=update_storage_data, + trigger="interval", + seconds=5, + args=( + self._database_blocked, + self._connection_factory, + self._storage_connection_factory, + ), + ) + self._update_plugin_log_job = self._scheduler.add_job( + func=update_plugin_log, + trigger="interval", + seconds=1, + args=( + self._database_blocked, + self._connection_factory, + self._storage_connection_factory, + ), + ) + self._update_chunks_data_job = self._scheduler.add_job( + func=update_chunks_data, + trigger="interval", + seconds=5, + args=( + self._database_blocked, + self._connection_factory, + self._storage_connection_factory, + self._previous_chunk_data, + ), + ) + self._update_workload_statement_information_job = self._scheduler.add_job( + func=update_workload_statement_information, + trigger="interval", + seconds=5, + args=( + self._database_blocked, 
+ self._connection_factory, + self._storage_connection_factory, + ), + ) + self._update_workload_operator_information_job = self._scheduler.add_job( + func=update_workload_operator_information, + trigger="interval", + seconds=5, + args=( + self._database_blocked, + self._connection_factory, + self._storage_connection_factory, + ), + ) + self._update_segment_configuration_job = self._scheduler.add_job( + func=update_segment_configuration, + trigger="interval", + seconds=5, + args=( + self._database_blocked, + self._connection_factory, + self._storage_connection_factory, + ), + ) + + def start(self) -> None: + """Start background scheduler.""" + self._scheduler.start() + + def close(self) -> None: + """Close background scheduler.""" + self._update_workload_statement_information_job.remove() + self._update_system_data_job.remove() + self._update_chunks_data_job.remove() + self._update_storage_data_job.remove() + self._update_plugin_log_job.remove() + self._update_queue_length_job.remove() + self._update_workload_operator_information_job.remove() + self._ping_hyrise_job.remove() + self._scheduler.shutdown() diff --git a/tests/database_manager/test_continous_job_handler.py b/tests/database_manager/test_continous_job_handler.py new file mode 100644 index 000000000..687ce64ce --- /dev/null +++ b/tests/database_manager/test_continous_job_handler.py @@ -0,0 +1,169 @@ +# noqa +# type: ignore +# flake8: noqa +"""Test for continuous job handler.""" + +from typing import Any, Callable, Tuple +from unittest.mock import MagicMock, call, patch + +from hyrisecockpit.database_manager.continuous_job_handler import ContinuousJobHandler +from hyrisecockpit.database_manager.job.ping_hyrise import ping_hyrise +from hyrisecockpit.database_manager.job.update_chunks_data import update_chunks_data +from hyrisecockpit.database_manager.job.update_plugin_log import update_plugin_log +from hyrisecockpit.database_manager.job.update_queue_length import update_queue_length +from 
hyrisecockpit.database_manager.job.update_storage_data import update_storage_data +from hyrisecockpit.database_manager.job.update_system_data import update_system_data +from hyrisecockpit.database_manager.job.update_workload_operator_information import ( + update_workload_operator_information, +) +from hyrisecockpit.database_manager.job.update_workload_statement_information import ( + update_workload_statement_information, +) + + +class TestContinuousJobHandler: + """This class test the continuous job handler.""" + + @patch("hyrisecockpit.database_manager.continuous_job_handler.BackgroundScheduler") + def test_inintializes_continuous_job_handler( + self, mock_background_scheduler: MagicMock + ) -> None: + """Test if continuous job handler is created correctly.""" + mock_background_scheduler_obj = MagicMock() + mock_background_scheduler.return_value = mock_background_scheduler_obj + continuous_job_handler = ContinuousJobHandler( + "connection_factory", + "hyrise_active", + "worker_pool", + "storage_connection_factory", + "database_blocked", + ) + + assert continuous_job_handler._connection_factory == "connection_factory" + assert continuous_job_handler._hyrise_active == "hyrise_active" + assert continuous_job_handler._worker_pool == "worker_pool" + assert ( + continuous_job_handler._storage_connection_factory + == "storage_connection_factory" + ) + assert continuous_job_handler._previous_system_data == { + "previous_system_usage": None, + "previous_process_usage": None, + } + assert continuous_job_handler._previous_chunk_data == { + "value": None, + } + assert continuous_job_handler._scheduler == mock_background_scheduler_obj + + @patch( + "hyrisecockpit.database_manager.continuous_job_handler.BackgroundScheduler", + MagicMock(), + ) + def test_initializes_background_scheduler_job(self) -> None: + """Test initialization of background scheduler job.""" + continuous_job_handler = ContinuousJobHandler( + "connection_factory", + "hyrise_active", + "worker_pool", + 
"storage_connection_factory", + "database_blocked", + ) + mock_scheduler: MagicMock = MagicMock() + mock_scheduler.add_job.return_value = None + continuous_job_handler._scheduler = mock_scheduler + + jobs: Tuple[Callable[..., Any], str, int] = [ + ( + update_queue_length, + "interval", + 1, + ( + continuous_job_handler._worker_pool, + continuous_job_handler._hyrise_active, + ), + ), + ( + update_workload_statement_information, + "interval", + 5, + (continuous_job_handler._storage_connection_factory,), + ), + ( + update_chunks_data, + "interval", + 5, + ( + continuous_job_handler._database_blocked, + continuous_job_handler._connection_factory, + continuous_job_handler._storage_connection_factory, + ), + ), + ( + update_system_data, + "interval", + 1, + ( + continuous_job_handler._database_blocked, + continuous_job_handler._connection_factory, + continuous_job_handler._storage_connection_factory, + ), + ), + ( + update_storage_data, + "interval", + 5, + ( + continuous_job_handler._database_blocked, + continuous_job_handler._connection_factory, + continuous_job_handler._storage_connection_factory, + ), + ), + ( + update_plugin_log, + "interval", + 1, + ( + continuous_job_handler._database_blocked, + continuous_job_handler._connection_factory, + continuous_job_handler._storage_connection_factory, + ), + ), + ( + update_workload_statement_information, + "interval", + 1, + ( + continuous_job_handler._database_blocked, + continuous_job_handler._connection_factory, + continuous_job_handler._storage_connection_factory, + ), + ), + ( + update_workload_operator_information, + "interval", + 1, + ( + continuous_job_handler._database_blocked, + continuous_job_handler._connection_factory, + continuous_job_handler._storage_connection_factory, + ), + ), + ( + ping_hyrise, + "interval", + 0.5, + ( + continuous_job_handler._connection_factory, + continuous_job_handler._hyrise_active, + ), + ), + ] + + expected = [ + call.add_job(func=job[0], trigger=job[1], seconds=job[2], 
args=job[3]) + for job in jobs + ] + + continuous_job_handler._init_jobs() + + mock_scheduler.mock_calls == expected From cb9f4f569481e4300630d882b1aac46f9178b2af Mon Sep 17 00:00:00 2001 From: Alexander-Dubrawski Date: Thu, 3 Sep 2020 09:39:50 +0200 Subject: [PATCH 2/7] Adjust background scheduler --- .../database_manager/background_scheduler.py | 127 ++-------------- .../test_background_scheduler.py | 141 +----------------- .../test_continous_job_handler.py | 33 ++++ 3 files changed, 49 insertions(+), 252 deletions(-) diff --git a/hyrisecockpit/database_manager/background_scheduler.py b/hyrisecockpit/database_manager/background_scheduler.py index 50ef290ac..4f58220fa 100644 --- a/hyrisecockpit/database_manager/background_scheduler.py +++ b/hyrisecockpit/database_manager/background_scheduler.py @@ -4,24 +4,12 @@ from apscheduler.schedulers.background import BackgroundScheduler +from .continuous_job_handler import ContinuousJobHandler from .cursor import ConnectionFactory, StorageConnectionFactory from .job.activate_plugin import activate_plugin as activate_plugin_job from .job.deactivate_plugin import deactivate_plugin as deactivate_plugin_job from .job.delete_tables import delete_tables as delete_tables_job from .job.load_tables import load_tables as load_tables_job -from .job.ping_hyrise import ping_hyrise -from .job.update_chunks_data import update_chunks_data -from .job.update_plugin_log import update_plugin_log -from .job.update_queue_length import update_queue_length -from .job.update_segment_configuration import update_segment_configuration -from .job.update_storage_data import update_storage_data -from .job.update_system_data import update_system_data -from .job.update_workload_operator_information import ( - update_workload_operator_information, -) -from .job.update_workload_statement_information import ( - update_workload_statement_information, -) from .worker_pool import WorkerPool @@ -47,118 +35,25 @@ def __init__( self._workload_drivers = 
workload_drivers self._scheduler: BackgroundScheduler = BackgroundScheduler() self._hyrise_active: Value = hyrise_active - self._previous_system_data = { - "previous_system_usage": None, - "previous_process_usage": None, - } - self._previous_chunk_data = { - "value": None, - } - - self._init_jobs() - - def _init_jobs(self) -> None: - """Initialize basic background jobs.""" - self._ping_hyrise_job = self._scheduler.add_job( - func=ping_hyrise, - trigger="interval", - seconds=0.5, - args=(self._connection_factory, self._hyrise_active), - ) - self._update_queue_length_job = self._scheduler.add_job( - func=update_queue_length, - trigger="interval", - seconds=1, - args=(self._worker_pool, self._storage_connection_factory), - ) - self._update_system_data_job = self._scheduler.add_job( - func=update_system_data, - trigger="interval", - seconds=1, - args=( - self._database_blocked, - self._connection_factory, - self._storage_connection_factory, - self._previous_system_data, - ), - ) - self._update_storage_data_job = self._scheduler.add_job( - func=update_storage_data, - trigger="interval", - seconds=5, - args=( - self._database_blocked, - self._connection_factory, - self._storage_connection_factory, - ), - ) - self._update_plugin_log_job = self._scheduler.add_job( - func=update_plugin_log, - trigger="interval", - seconds=1, - args=( - self._database_blocked, - self._connection_factory, - self._storage_connection_factory, - ), - ) - self._update_chunks_data_job = self._scheduler.add_job( - func=update_chunks_data, - trigger="interval", - seconds=5, - args=( - self._database_blocked, - self._connection_factory, - self._storage_connection_factory, - self._previous_chunk_data, - ), - ) - self._update_workload_statement_information_job = self._scheduler.add_job( - func=update_workload_statement_information, - trigger="interval", - seconds=5, - args=( - self._database_blocked, - self._connection_factory, - self._storage_connection_factory, - ), - ) - 
self._update_workload_operator_information_job = self._scheduler.add_job( - func=update_workload_operator_information, - trigger="interval", - seconds=5, - args=( - self._database_blocked, - self._connection_factory, - self._storage_connection_factory, - ), - ) - self._update_segment_configuration_job = self._scheduler.add_job( - func=update_segment_configuration, - trigger="interval", - seconds=5, - args=( - self._database_blocked, - self._connection_factory, - self._storage_connection_factory, - ), + self._continuous_job_handler = ContinuousJobHandler( + self._connection_factory, + self._hyrise_active, + self._worker_pool, + self._storage_connection_factory, + self._database_blocked, ) + self._continuous_job_handler.start() def start(self) -> None: """Start background scheduler.""" + # Todo replace self._scheduler with python threads self._scheduler.start() + self._continuous_job_handler.start() def close(self) -> None: """Close background scheduler.""" - self._update_workload_statement_information_job.remove() - self._update_system_data_job.remove() - self._update_chunks_data_job.remove() - self._update_storage_data_job.remove() - self._update_plugin_log_job.remove() - self._update_queue_length_job.remove() - self._update_workload_operator_information_job.remove() - self._ping_hyrise_job.remove() self._scheduler.shutdown() + self._continuous_job_handler.close() def load_tables(self, workload_type: str, scalefactor: float) -> bool: """Load tables.""" diff --git a/tests/database_manager/test_background_scheduler.py b/tests/database_manager/test_background_scheduler.py index 347e5b163..02e4c95c7 100644 --- a/tests/database_manager/test_background_scheduler.py +++ b/tests/database_manager/test_background_scheduler.py @@ -1,7 +1,6 @@ """Tests for the background_scheduler module.""" from multiprocessing import Value -from typing import Any, Callable, Tuple -from unittest.mock import call, patch +from unittest.mock import patch from pytest import fixture @@ -19,18 
+18,6 @@ from hyrisecockpit.database_manager.job.load_tables import ( load_tables as load_tables_job, ) -from hyrisecockpit.database_manager.job.ping_hyrise import ping_hyrise -from hyrisecockpit.database_manager.job.update_chunks_data import update_chunks_data -from hyrisecockpit.database_manager.job.update_plugin_log import update_plugin_log -from hyrisecockpit.database_manager.job.update_queue_length import update_queue_length -from hyrisecockpit.database_manager.job.update_storage_data import update_storage_data -from hyrisecockpit.database_manager.job.update_system_data import update_system_data -from hyrisecockpit.database_manager.job.update_workload_operator_information import ( - update_workload_operator_information, -) -from hyrisecockpit.database_manager.job.update_workload_statement_information import ( - update_workload_statement_information, -) database_id: str = "MongoDB" database_blocked: Value = Value("b", False) @@ -50,7 +37,7 @@ class TestBackgroundJobManager: MagicMock(), ) @patch( - "hyrisecockpit.database_manager.background_scheduler.BackgroundJobManager._init_jobs", + "hyrisecockpit.database_manager.background_scheduler.ContinuousJobHandler", MagicMock(), ) def background_job_manager(self) -> BackgroundJobManager: @@ -70,12 +57,7 @@ def test_creates(self, background_job_manager: BackgroundJobManager): assert background_job_manager @patch("hyrisecockpit.database_manager.background_scheduler.BackgroundScheduler") - @patch( - "hyrisecockpit.database_manager.background_scheduler.BackgroundJobManager._init_jobs" - ) - def test_initializes_correctly( - self, mock_init_jobs: MagicMock, mock_background_scheduler: MagicMock - ) -> None: + def test_initializes_correctly(self, mock_background_scheduler: MagicMock) -> None: """A BackgroundJobManager initializes correctly.""" background_job_manager = BackgroundJobManager( database_id, @@ -97,105 +79,8 @@ def test_initializes_correctly( == storage_connection_factory ) assert 
background_job_manager._hyrise_active == hyrise_active - mock_init_jobs.assert_called_once() mock_background_scheduler.assert_called_once() - def test_initializes_background_scheduler_job( - self, background_job_manager: BackgroundJobManager - ) -> None: - """Test initialization of background scheduler job.""" - mock_scheduler: MagicMock = MagicMock() - mock_scheduler.add_job.return_value = None - background_job_manager._scheduler = mock_scheduler - - jobs: Tuple[Callable[..., Any], str, int] = [ - ( - update_queue_length, - "interval", - 1, - ( - background_job_manager._worker_pool, - background_job_manager._hyrise_active, - ), - ), - ( - update_workload_statement_information, - "interval", - 5, - (background_job_manager._storage_connection_factory,), - ), - ( - update_chunks_data, - "interval", - 5, - ( - background_job_manager._database_blocked, - background_job_manager._connection_factory, - background_job_manager._storage_connection_factory, - ), - ), - ( - update_system_data, - "interval", - 1, - ( - background_job_manager._database_blocked, - background_job_manager._connection_factory, - background_job_manager._storage_connection_factory, - ), - ), - ( - update_storage_data, - "interval", - 5, - ( - background_job_manager._database_blocked, - background_job_manager._connection_factory, - background_job_manager._storage_connection_factory, - ), - ), - ( - update_plugin_log, - "interval", - 1, - ( - background_job_manager._database_blocked, - background_job_manager._connection_factory, - background_job_manager._storage_connection_factory, - ), - ), - ( - update_workload_statement_information, - "interval", - 1, - ( - background_job_manager._database_blocked, - background_job_manager._connection_factory, - background_job_manager._storage_connection_factory, - ), - ), - ( - update_workload_operator_information, - "interval", - 1, - ( - background_job_manager._database_blocked, - background_job_manager._connection_factory, - 
background_job_manager._storage_connection_factory, - ), - ), - (ping_hyrise, "interval", 0.5, (background_job_manager._connection_factory, background_job_manager._hyrise_active)), # type: ignore - ] - - expected = [ - call.add_job(func=job[0], trigger=job[1], seconds=job[2], args=job[3]) # type: ignore - for job in jobs - ] - - background_job_manager._init_jobs() - - mock_scheduler.mock_calls == expected - def test_background_scheduler_starts( self, background_job_manager: BackgroundJobManager ) -> None: @@ -211,29 +96,13 @@ def test_background_scheduler_starts( def test_background_scheduler_closes( self, background_job_manager: BackgroundJobManager ) -> None: - """Test close of background scheduler object.""" + """Test close of background scheduler.""" mock_scheduler: MagicMock = MagicMock() - mock_scheduler.shutdown.return_value = None + mock_scheduler.close.return_value = None background_job_manager._scheduler = mock_scheduler - background_job_manager._update_workload_statement_information_job = MagicMock() - background_job_manager._update_system_data_job = MagicMock() - background_job_manager._update_chunks_data_job = MagicMock() - background_job_manager._update_storage_data_job = MagicMock() - background_job_manager._update_plugin_log_job = MagicMock() - background_job_manager._ping_hyrise_job = MagicMock() - background_job_manager._update_queue_length_job = MagicMock() - background_job_manager._update_workload_operator_information_job = MagicMock() background_job_manager.close() - background_job_manager._update_workload_statement_information_job.remove.assert_called_once() - background_job_manager._update_system_data_job.remove.assert_called_once() - background_job_manager._update_chunks_data_job.remove.assert_called_once() - background_job_manager._update_storage_data_job.remove.assert_called_once() - background_job_manager._update_plugin_log_job.remove.assert_called_once() - background_job_manager._ping_hyrise_job.remove.assert_called_once() - 
background_job_manager._update_queue_length_job.remove.assert_called_once() - background_job_manager._update_workload_operator_information_job.remove.assert_called_once() mock_scheduler.shutdown.assert_called_once() def test_successfully_start_loading_tables( diff --git a/tests/database_manager/test_continous_job_handler.py b/tests/database_manager/test_continous_job_handler.py index 687ce64ce..f1eda9cf5 100644 --- a/tests/database_manager/test_continous_job_handler.py +++ b/tests/database_manager/test_continous_job_handler.py @@ -167,3 +167,36 @@ def test_initializes_background_scheduler_job(self) -> None: continuous_job_handler._init_jobs() mock_scheduler.mock_calls == expected + + def test_background_scheduler_closes(self) -> None: + """Test close of background scheduler object.""" + continuous_job_handler = ContinuousJobHandler( + "connection_factory", + "hyrise_active", + "worker_pool", + "storage_connection_factory", + "database_blocked", + ) + mock_scheduler: MagicMock = MagicMock() + mock_scheduler.shutdown.return_value = None + continuous_job_handler._scheduler = mock_scheduler + continuous_job_handler._update_workload_statement_information_job = MagicMock() + continuous_job_handler._update_system_data_job = MagicMock() + continuous_job_handler._update_chunks_data_job = MagicMock() + continuous_job_handler._update_storage_data_job = MagicMock() + continuous_job_handler._update_plugin_log_job = MagicMock() + continuous_job_handler._ping_hyrise_job = MagicMock() + continuous_job_handler._update_queue_length_job = MagicMock() + continuous_job_handler._update_workload_operator_information_job = MagicMock() + + continuous_job_handler.close() + + continuous_job_handler._update_workload_statement_information_job.remove.assert_called_once() + continuous_job_handler._update_system_data_job.remove.assert_called_once() + continuous_job_handler._update_chunks_data_job.remove.assert_called_once() + 
continuous_job_handler._update_storage_data_job.remove.assert_called_once() + continuous_job_handler._update_plugin_log_job.remove.assert_called_once() + continuous_job_handler._ping_hyrise_job.remove.assert_called_once() + continuous_job_handler._update_queue_length_job.remove.assert_called_once() + continuous_job_handler._update_workload_operator_information_job.remove.assert_called_once() + mock_scheduler.shutdown.assert_called_once() From 2181e2153e560fd2d4cc41d5c7268902d1c66eec Mon Sep 17 00:00:00 2001 From: Alexander-Dubrawski Date: Tue, 8 Sep 2020 11:21:27 +0200 Subject: [PATCH 3/7] Remove background scheduler --- .../database_manager/background_scheduler.py | 66 ---- hyrisecockpit/database_manager/database.py | 24 +- .../test_background_scheduler.py | 172 ---------- tests/database_manager/test_database.py | 314 ++++++++++-------- 4 files changed, 192 insertions(+), 384 deletions(-) delete mode 100644 hyrisecockpit/database_manager/background_scheduler.py delete mode 100644 tests/database_manager/test_background_scheduler.py diff --git a/hyrisecockpit/database_manager/background_scheduler.py b/hyrisecockpit/database_manager/background_scheduler.py deleted file mode 100644 index 1dd5a73f6..000000000 --- a/hyrisecockpit/database_manager/background_scheduler.py +++ /dev/null @@ -1,66 +0,0 @@ -"""The BackgroundJobManager is managing the background jobs for the apscheduler.""" - -from multiprocessing import Value - -from .asynchronous_job_handler import AsynchronousJobHandler -from .continuous_job_handler import ContinuousJobHandler -from .cursor import ConnectionFactory, StorageConnectionFactory -from .worker_pool import WorkerPool - - -class BackgroundJobManager(object): - """Manage background scheduling jobs.""" - - def __init__( - self, - database_id: str, - database_blocked: Value, - connection_factory: ConnectionFactory, - hyrise_active: Value, - worker_pool: WorkerPool, - storage_connection_factory: StorageConnectionFactory, - workload_drivers, - ): - 
"""Initialize BackgroundJobManager object.""" - self._database_id: str = database_id - self._database_blocked: Value = database_blocked - self._connection_factory: ConnectionFactory = connection_factory - self._storage_connection_factory: StorageConnectionFactory = storage_connection_factory - self._worker_pool: WorkerPool = worker_pool - self._workload_drivers = workload_drivers - self._hyrise_active: Value = hyrise_active - self._continuous_job_handler = ContinuousJobHandler( - self._connection_factory, - self._hyrise_active, - self._worker_pool, - self._storage_connection_factory, - self._database_blocked, - ) - self._asynchronous_job_handler = AsynchronousJobHandler( - self._database_blocked, self._connection_factory, self._workload_drivers, - ) - self._continuous_job_handler.start() - - def start(self) -> None: - """Start background scheduler.""" - self._continuous_job_handler.start() - - def close(self) -> None: - """Close background scheduler.""" - self._continuous_job_handler.close() - - def load_tables(self, workload_type: str, scalefactor: float) -> bool: - """Load tables.""" - return self._asynchronous_job_handler.load_tables(workload_type, scalefactor) - - def delete_tables(self, workload_type: str, scalefactor: float) -> bool: - """Delete tables.""" - return self._asynchronous_job_handler.delete_tables(workload_type, scalefactor) - - def activate_plugin(self, plugin: str) -> bool: - """Activate plugin.""" - return self._asynchronous_job_handler.activate_plugin(plugin) - - def deactivate_plugin(self, plugin: str) -> bool: - """Dectivate plugin.""" - return self._asynchronous_job_handler.deactivate_plugin(plugin) diff --git a/hyrisecockpit/database_manager/database.py b/hyrisecockpit/database_manager/database.py index 3771c218c..a6070d2ce 100644 --- a/hyrisecockpit/database_manager/database.py +++ b/hyrisecockpit/database_manager/database.py @@ -7,7 +7,8 @@ from hyrisecockpit.drivers.connector import Connector -from .background_scheduler import 
BackgroundJobManager +from .asynchronous_job_handler import AsynchronousJobHandler +from .continuous_job_handler import ContinuousJobHandler from .cursor import ConnectionFactory, StorageConnectionFactory from .interfaces import SqlResultInterface from .worker_pool import WorkerPool @@ -69,17 +70,18 @@ def __init__( self._database_blocked, self._workload_drivers, ) - self._background_scheduler: BackgroundJobManager = BackgroundJobManager( - self._id, - self._database_blocked, + self._continuous_job_handler = ContinuousJobHandler( self._connection_factory, self._hyrise_active, self._worker_pool, self._storage_connection_factory, - self._workload_drivers, + self._database_blocked, + ) + self._asynchronous_job_handler = AsynchronousJobHandler( + self._database_blocked, self._connection_factory, self._workload_drivers, ) self._initialize_influx() - self._background_scheduler.start() + self._continuous_job_handler.start() def _initialize_influx(self) -> None: """Initialize Influx database.""" @@ -178,7 +180,7 @@ def load_data(self, workload: Dict) -> bool: elif self._worker_pool.get_status() != "closed": return False else: - return self._background_scheduler.load_tables( + return self._asynchronous_job_handler.load_tables( workload_type, float(scale_factor) ) @@ -195,7 +197,7 @@ def delete_data(self, workload: Dict) -> bool: elif self._worker_pool.get_status() != "closed": return False else: - return self._background_scheduler.delete_tables( + return self._asynchronous_job_handler.delete_tables( workload_type, float(scale_factor) ) @@ -204,11 +206,11 @@ def activate_plugin(self, plugin: str) -> bool: active_plugins = self._get_plugins() if active_plugins is None or plugin in active_plugins: return False - return self._background_scheduler.activate_plugin(plugin) + return self._asynchronous_job_handler.activate_plugin(plugin) def deactivate_plugin(self, plugin: str) -> bool: """Deactivate plugin.""" - return self._background_scheduler.deactivate_plugin(plugin) + return 
self._asynchronous_job_handler.deactivate_plugin(plugin) def get_database_blocked(self) -> bool: """Return tables loading flag.""" @@ -367,4 +369,4 @@ def execute_sql_query(self, query) -> Optional[SqlResultInterface]: def close(self) -> None: """Close the database.""" self._worker_pool.terminate() - self._background_scheduler.close() + self._continuous_job_handler.close() diff --git a/tests/database_manager/test_background_scheduler.py b/tests/database_manager/test_background_scheduler.py deleted file mode 100644 index e9a7e9632..000000000 --- a/tests/database_manager/test_background_scheduler.py +++ /dev/null @@ -1,172 +0,0 @@ -"""Tests for the background_scheduler module.""" -from multiprocessing import Value -from unittest.mock import patch - -from pytest import fixture - -from hyrisecockpit.cross_platform_support.testing_support import MagicMock -from hyrisecockpit.database_manager.background_scheduler import BackgroundJobManager - -database_id: str = "MongoDB" -database_blocked: Value = Value("b", False) -hyrise_active: Value = Value("b", True) -storage_connection_factory: MagicMock = MagicMock() -connection_factory: MagicMock = MagicMock() -worker_pool: MagicMock = MagicMock() -workload_drivers = MagicMock() - - -class TestBackgroundJobManager: - """Tests for the BackgroundJobManager class.""" - - @fixture - @patch( - "hyrisecockpit.database_manager.background_scheduler.ContinuousJobHandler", - MagicMock(), - ) - @patch( - "hyrisecockpit.database_manager.background_scheduler.AsynchronousJobHandler", - MagicMock(), - ) - def background_job_manager(self) -> BackgroundJobManager: - """Get a new BackgrpundJobManager.""" - return BackgroundJobManager( - database_id, - database_blocked, - connection_factory, - hyrise_active, - worker_pool, - storage_connection_factory, - workload_drivers, - ) - - def test_creates(self, background_job_manager: BackgroundJobManager): - """A BackgroundJobManager can be created.""" - assert background_job_manager - - 
@patch("hyrisecockpit.database_manager.background_scheduler.ContinuousJobHandler") - @patch( - "hyrisecockpit.database_manager.background_scheduler.AsynchronousJobHandler", - ) - def test_initializes_correctly( - self, - mock_asynchronous_job_handler: MagicMock, - mock_continuous_job_handler: MagicMock, - ) -> None: - """A BackgroundJobManager initializes correctly.""" - background_job_manager = BackgroundJobManager( - database_id, - database_blocked, - connection_factory, - hyrise_active, - worker_pool, - storage_connection_factory, - workload_drivers, - ) - - assert isinstance(background_job_manager, BackgroundJobManager) - assert background_job_manager._database_id == database_id - assert background_job_manager._database_blocked == database_blocked - assert background_job_manager._connection_factory == connection_factory - assert background_job_manager._workload_drivers == workload_drivers - assert ( - background_job_manager._storage_connection_factory - == storage_connection_factory - ) - assert background_job_manager._hyrise_active == hyrise_active - mock_asynchronous_job_handler.assert_called_once_with( - background_job_manager._database_blocked, - background_job_manager._connection_factory, - background_job_manager._workload_drivers, - ) - mock_continuous_job_handler.assert_called_once_with( - background_job_manager._connection_factory, - background_job_manager._hyrise_active, - background_job_manager._worker_pool, - background_job_manager._storage_connection_factory, - background_job_manager._database_blocked, - ) - - def test_background_scheduler_starts( - self, background_job_manager: BackgroundJobManager - ) -> None: - """Test start of background scheduler.""" - mock_scheduler: MagicMock = MagicMock() - mock_scheduler.start.return_value = None - background_job_manager._continuous_job_handler = mock_scheduler - - background_job_manager.start() - - mock_scheduler.start.assert_called_once() - - def test_background_scheduler_closes( - self, 
background_job_manager: BackgroundJobManager - ) -> None: - """Test close of background scheduler.""" - mock_scheduler: MagicMock = MagicMock() - mock_scheduler.close.return_value = None - background_job_manager._continuous_job_handler = mock_scheduler - - background_job_manager.close() - - mock_scheduler.close.assert_called_once() - - def test_start_loading_tables( - self, background_job_manager: BackgroundJobManager - ) -> None: - """Test successfully start loading tables job.""" - mock_asynchronous_job_handler = MagicMock() - mock_asynchronous_job_handler.return_value = True - background_job_manager._asynchronous_job_handler = mock_asynchronous_job_handler - workload_type = "TPC-C" - scalefactor = 5.0 - - result = background_job_manager.load_tables(workload_type, scalefactor) - - mock_asynchronous_job_handler.load_tables.assert_called_once_with( - workload_type, scalefactor - ) - assert result - - def test_delete_tables(self, background_job_manager: BackgroundJobManager) -> None: - """Test successfully delete loading tables job.""" - mock_asynchronous_job_handler = MagicMock() - mock_asynchronous_job_handler.return_value = True - background_job_manager._asynchronous_job_handler = mock_asynchronous_job_handler - workload_type = "TPC-C" - scalefactor = 5.0 - - result = background_job_manager.delete_tables(workload_type, scalefactor) - - mock_asynchronous_job_handler.delete_tables.assert_called_once_with( - workload_type, scalefactor - ) - assert result - - def test_activate_plugin( - self, background_job_manager: BackgroundJobManager - ) -> None: - """Test successfully start activate plug-in job.""" - mock_asynchronous_job_handler = MagicMock() - mock_asynchronous_job_handler.return_value = True - background_job_manager._asynchronous_job_handler = mock_asynchronous_job_handler - plugin = "compression" - - result = background_job_manager.activate_plugin(plugin) - - mock_asynchronous_job_handler.activate_plugin.assert_called_once_with(plugin) - assert result - - def 
test_successfully_deactivate_plugin( - self, background_job_manager: BackgroundJobManager - ) -> None: - """Test successfully start deactivate plug-in job.""" - mock_asynchronous_job_handler = MagicMock() - mock_asynchronous_job_handler.return_value = True - background_job_manager._asynchronous_job_handler = mock_asynchronous_job_handler - plugin = "compression" - - result = background_job_manager.deactivate_plugin(plugin) - - mock_asynchronous_job_handler.deactivate_plugin.assert_called_once_with(plugin) - assert result diff --git a/tests/database_manager/test_database.py b/tests/database_manager/test_database.py index 6b084ae45..3093031e7 100644 --- a/tests/database_manager/test_database.py +++ b/tests/database_manager/test_database.py @@ -2,7 +2,7 @@ from collections import Counter from multiprocessing.sharedctypes import Synchronized as ValueType -from typing import Dict, List, Optional +from typing import List, Optional from unittest.mock import MagicMock, patch from psycopg2 import DatabaseError, Error, InterfaceError @@ -24,42 +24,22 @@ storage_port: str = "42" storage_user: str = "Käptin Blaubär" -mocked_pool_cur: MagicMock = MagicMock() -mocked_pool_cur.fetchall.return_value = [] - - -def get_fake_tables() -> Dict: - """Return fake table dictionary.""" - fake_dict: Dict[str, List[str]] = { - "alternative": [ - "The Dough Rollers", - "Broken Witt Rebels", - "Bonny Doon", - "Jack White", - ], - "Rock": ["Gary Clark Jr.", "Greta Van Fleet", "Tenacious D"], - } - return fake_dict - - -def get_fake_background_job_manager(*args) -> MagicMock: - """Return fake BackgroundJobManager.""" - mocked_job_manager: MagicMock = MagicMock() - mocked_job_manager.start.side_effect = None - mocked_job_manager.load_tables.side_effect = None - return mocked_job_manager - class TestDatabase(object): """Tests for the Database class.""" @fixture + @patch("hyrisecockpit.database_manager.database.WorkerPool", MagicMock()) @patch( - 
"hyrisecockpit.database_manager.database.BackgroundJobManager", - get_fake_background_job_manager, + "hyrisecockpit.database_manager.database.ContinuousJobHandler", MagicMock(), + ) + @patch( + "hyrisecockpit.database_manager.database.AsynchronousJobHandler", MagicMock() ) - @patch("hyrisecockpit.database_manager.database.WorkerPool", MagicMock()) @patch("hyrisecockpit.database_manager.database.ConnectionFactory", MagicMock()) + @patch( + "hyrisecockpit.database_manager.database.StorageConnectionFactory", MagicMock() + ) @patch( "hyrisecockpit.database_manager.database.Database._initialize_influx", MagicMock(), @@ -82,8 +62,69 @@ def database(self) -> Database: storage_user, ) - def test_inintializes_database(self, database: Database) -> None: + @patch("hyrisecockpit.database_manager.database.Connector") + @patch("hyrisecockpit.database_manager.database.WorkerPool") + @patch("hyrisecockpit.database_manager.database.ContinuousJobHandler",) + @patch("hyrisecockpit.database_manager.database.AsynchronousJobHandler") + @patch("hyrisecockpit.database_manager.database.ConnectionFactory") + @patch("hyrisecockpit.database_manager.database.StorageConnectionFactory") + @patch("hyrisecockpit.database_manager.database.Database._initialize_influx") + def test_inintializes_database( + self, + mock_initialize_influx: MagicMock, + mock_storage_connection_factory: MagicMock, + mock_connection_factory: MagicMock, + mock_asynchronous_job_handler: MagicMock, + mock_continuous_job_handler: MagicMock, + mock_worker_pool: MagicMock, + mock_connector: MagicMock, + ) -> None: """Test initialization of database attributes.""" + database = Database( + database_id, + database_user, + database_password, + database_host, + database_port, + database_name, + number_workers, + workload_publisher_url, + default_tables, + storage_host, + storage_password, + storage_port, + storage_user, + ) + mock_connection_factory.assert_called_once_with( + **database.connection_information + ) + 
mock_storage_connection_factory.assert_called_once_with( + storage_user, storage_password, storage_host, storage_port, database_id + ) + mock_connector.get_workload_drivers.assert_called_once() + mock_worker_pool.assert_called_once_with( + database._connection_factory, + number_workers, + database_id, + workload_publisher_url, + database._database_blocked, + database._workload_drivers, + ) + mock_continuous_job_handler.assert_called_once_with( + database._connection_factory, + database._hyrise_active, + database._worker_pool, + database._storage_connection_factory, + database._database_blocked, + ) + mock_asynchronous_job_handler.assert_called_once_with( + database._database_blocked, + database._connection_factory, + database._workload_drivers, + ) + database._continuous_job_handler.start.assert_called_once() # type: ignore + mock_initialize_influx.assert_called_once() + assert database._id == database_id assert database.number_workers == number_workers assert database._default_tables == default_tables @@ -91,7 +132,6 @@ def test_inintializes_database(self, database: Database) -> None: assert type(database._hyrise_active) is ValueType assert database._hyrise_active.value assert not database._database_blocked.value - database._background_scheduler.start.assert_called_once() # type: ignore def test_gets_worker_pool_queue_length(self, database: Database) -> None: """Test return of queue length from worker pool.""" @@ -108,69 +148,69 @@ def test_loads_data_while_worker_pool_is_closed_and_load_table_is_successful( self, database: Database ) -> None: """Test loading data while worker pool is closed and background scheduler is returning true.""" - mocked_worker_pool: MagicMock = MagicMock() - mocked_worker_pool.get_status.return_value = "closed" - mocked_background_scheduler: MagicMock = MagicMock() - mocked_background_scheduler.load_tables.return_value = True + mock_worker_pool: MagicMock = MagicMock() + mock_worker_pool.get_status.return_value = "closed" + 
mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.load_tables.return_value = True fake_workload = {"workload_type": "tpch", "scale_factor": 1.0} - database._worker_pool = mocked_worker_pool - database._background_scheduler = mocked_background_scheduler + database._worker_pool = mock_worker_pool + database._asynchronous_job_handler = mock_asynchronous_job_handler result: bool = database.load_data(fake_workload) - mocked_worker_pool.get_status.assert_called_once() - mocked_background_scheduler.load_tables.assert_called_once_with("tpch", 1.0) + mock_worker_pool.get_status.assert_called_once() + mock_asynchronous_job_handler.load_tables.assert_called_once_with("tpch", 1.0) assert type(result) is bool assert result def test_loads_data_while_worker_pool_is_running(self, database: Database) -> None: """Test loading data while worker pool is running.""" - mocked_worker_pool: MagicMock = MagicMock() - mocked_worker_pool.get_status.return_value = "running" - mocked_background_scheduler: MagicMock = MagicMock() - mocked_background_scheduler.load_tables.return_value = True + mock_worker_pool: MagicMock = MagicMock() + mock_worker_pool.get_status.return_value = "running" + mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.load_tables.return_value = True fake_workload = {"workload_type": "tpch", "scale_factor": 1.0} - database._worker_pool = mocked_worker_pool - database._background_scheduler = mocked_background_scheduler + database._worker_pool = mock_worker_pool + database._asynchronous_job_handler = mock_asynchronous_job_handler result: bool = database.load_data(fake_workload) - mocked_worker_pool.get_status.assert_called_once() - mocked_background_scheduler.load_tables.assert_not_called() + mock_worker_pool.get_status.assert_called_once() + mock_asynchronous_job_handler.load_tables.assert_not_called() assert type(result) is bool assert not result def test_loads_data_with_no_valid_workload_type(self, 
database: Database) -> None: """Test load data with no valid workload type.""" - mocked_worker_pool: MagicMock = MagicMock() - mocked_worker_pool.get_status.return_value = "running" - mocked_background_scheduler: MagicMock = MagicMock() - mocked_background_scheduler.load_tables.return_value = True + mock_worker_pool: MagicMock = MagicMock() + mock_worker_pool.get_status.return_value = "running" + mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.load_tables.return_value = True fake_workload = {"workload_type": "tpcc_not_valid", "scale_factor": 1.0} - database._worker_pool = mocked_worker_pool - database._background_scheduler = mocked_background_scheduler + database._worker_pool = mock_worker_pool + database._asynchronous_job_handler = mock_asynchronous_job_handler result: bool = database.load_data(fake_workload) - mocked_background_scheduler.load_tables.assert_not_called() + mock_asynchronous_job_handler.load_tables.assert_not_called() assert type(result) is bool assert not result def test_loads_data_with_no_valid_scalefactor(self, database: Database) -> None: """Test load data with no valid scalefactor.""" - mocked_worker_pool: MagicMock = MagicMock() - mocked_worker_pool.get_status.return_value = "running" - mocked_background_scheduler: MagicMock = MagicMock() - mocked_background_scheduler.load_tables.return_value = True + mock_worker_pool: MagicMock = MagicMock() + mock_worker_pool.get_status.return_value = "running" + mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.load_tables.return_value = True fake_workload = {"workload_type": "tpcc", "scale_factor": 1.0} - database._worker_pool = mocked_worker_pool - database._background_scheduler = mocked_background_scheduler + database._worker_pool = mock_worker_pool + database._asynchronous_job_handler = mock_asynchronous_job_handler mock_driver = MagicMock() mock_driver.get_scalefactors.return_value = [5.0] database._workload_drivers = {"tpcc": 
mock_driver} result: bool = database.load_data(fake_workload) - mocked_background_scheduler.load_tables.assert_not_called() + mock_asynchronous_job_handler.load_tables.assert_not_called() assert type(result) is bool assert not result @@ -178,18 +218,18 @@ def test_loads_data_while_worker_pool_is_closed_and_load_table_failed( self, database: Database ) -> None: """Test loading data while worker pool is running.""" - mocked_worker_pool: MagicMock = MagicMock() - mocked_worker_pool.get_status.return_value = "closed" - mocked_background_scheduler: MagicMock = MagicMock() - mocked_background_scheduler.load_tables.return_value = False + mock_worker_pool: MagicMock = MagicMock() + mock_worker_pool.get_status.return_value = "closed" + mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.load_tables.return_value = False fake_workload = {"workload_type": "tpch", "scale_factor": 1.0} - database._worker_pool = mocked_worker_pool - database._background_scheduler = mocked_background_scheduler + database._worker_pool = mock_worker_pool + database._asynchronous_job_handler = mock_asynchronous_job_handler result: bool = database.load_data(fake_workload) - mocked_worker_pool.get_status.assert_called_once() - mocked_background_scheduler.load_tables.assert_called_once_with("tpch", 1.0) + mock_worker_pool.get_status.assert_called_once() + mock_asynchronous_job_handler.load_tables.assert_called_once_with("tpch", 1.0) assert type(result) is bool assert not result @@ -197,18 +237,18 @@ def test_deletes_data_while_worker_pool_is_closed_and_load_table_is_successful( self, database: Database ) -> None: """Test delete of data while worker pool is closed and background scheduler is returning true.""" - mocked_worker_pool: MagicMock = MagicMock() - mocked_worker_pool.get_status.return_value = "closed" - mocked_background_scheduler: MagicMock = MagicMock() - mocked_background_scheduler.delete_tables.return_value = True + mock_worker_pool: MagicMock = MagicMock() 
+ mock_worker_pool.get_status.return_value = "closed" + mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.delete_tables.return_value = True fake_workload = {"workload_type": "tpch", "scale_factor": 1.0} - database._worker_pool = mocked_worker_pool - database._background_scheduler = mocked_background_scheduler + database._worker_pool = mock_worker_pool + database._asynchronous_job_handler = mock_asynchronous_job_handler result: bool = database.delete_data(fake_workload) - mocked_worker_pool.get_status.assert_called_once() - mocked_background_scheduler.delete_tables.assert_called_once_with("tpch", 1.0) + mock_worker_pool.get_status.assert_called_once() + mock_asynchronous_job_handler.delete_tables.assert_called_once_with("tpch", 1.0) assert type(result) is bool assert result @@ -216,53 +256,53 @@ def test_deletes_data_while_worker_pool_is_running( self, database: Database ) -> None: """Test delete of data while worker pool is running.""" - mocked_worker_pool: MagicMock = MagicMock() - mocked_worker_pool.get_status.return_value = "running" - mocked_background_scheduler: MagicMock = MagicMock() - mocked_background_scheduler.delete_tables.return_value = True + mock_worker_pool: MagicMock = MagicMock() + mock_worker_pool.get_status.return_value = "running" + mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.delete_tables.return_value = True fake_workload = {"workload_type": "tpch", "scale_factor": 1.0} - database._worker_pool = mocked_worker_pool - database._background_scheduler = mocked_background_scheduler + database._worker_pool = mock_worker_pool + database._asynchronous_job_handler = mock_asynchronous_job_handler result: bool = database.delete_data(fake_workload) - mocked_worker_pool.get_status.assert_called_once() - mocked_background_scheduler.delete_tables.assert_not_called() + mock_worker_pool.get_status.assert_called_once() + mock_asynchronous_job_handler.delete_tables.assert_not_called() 
assert type(result) is bool assert not result def test_delete_data_with_no_valid_workload_type(self, database: Database) -> None: """Test delete data with no valid workload type.""" - mocked_worker_pool: MagicMock = MagicMock() - mocked_worker_pool.get_status.return_value = "running" - mocked_background_scheduler: MagicMock = MagicMock() - mocked_background_scheduler.load_tables.return_value = True + mock_worker_pool: MagicMock = MagicMock() + mock_worker_pool.get_status.return_value = "running" + mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.load_tables.return_value = True fake_workload = {"workload_type": "tpcc_not_valid", "scale_factor": 1.0} - database._worker_pool = mocked_worker_pool - database._background_scheduler = mocked_background_scheduler + database._worker_pool = mock_worker_pool + database._asynchronous_job_handler = mock_asynchronous_job_handler result: bool = database.delete_data(fake_workload) - mocked_background_scheduler.delete_tables.assert_not_called() + mock_asynchronous_job_handler.delete_tables.assert_not_called() assert type(result) is bool assert not result def test_delete_data_with_no_valid_scalefactor(self, database: Database) -> None: """Test delete data with no valid scalefactor.""" - mocked_worker_pool: MagicMock = MagicMock() - mocked_worker_pool.get_status.return_value = "running" - mocked_background_scheduler: MagicMock = MagicMock() - mocked_background_scheduler.load_tables.return_value = True + mock_worker_pool: MagicMock = MagicMock() + mock_worker_pool.get_status.return_value = "running" + mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.load_tables.return_value = True fake_workload = {"workload_type": "tpcc", "scale_factor": 1.0} - database._worker_pool = mocked_worker_pool - database._background_scheduler = mocked_background_scheduler + database._worker_pool = mock_worker_pool + database._asynchronous_job_handler = mock_asynchronous_job_handler 
mock_driver = MagicMock() mock_driver.get_scalefactors.return_value = [5.0] database._workload_drivers = {"tpcc": mock_driver} result: bool = database.delete_data(fake_workload) - mocked_background_scheduler.delete_tables.assert_not_called() + mock_asynchronous_job_handler.delete_tables.assert_not_called() assert type(result) is bool assert not result @@ -270,50 +310,54 @@ def test_deletes_data_while_worker_pool_is_closed_and_load_table_failed( self, database: Database ) -> None: """Test delete of data while worker pool is running.""" - mocked_worker_pool: MagicMock = MagicMock() - mocked_worker_pool.get_status.return_value = "closed" - mocked_background_scheduler: MagicMock = MagicMock() - mocked_background_scheduler.delete_tables.return_value = False + mock_worker_pool: MagicMock = MagicMock() + mock_worker_pool.get_status.return_value = "closed" + mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.delete_tables.return_value = False fake_workload = {"workload_type": "tpch", "scale_factor": 1.0} - database._worker_pool = mocked_worker_pool - database._background_scheduler = mocked_background_scheduler + database._worker_pool = mock_worker_pool + database._asynchronous_job_handler = mock_asynchronous_job_handler result: bool = database.delete_data(fake_workload) - mocked_worker_pool.get_status.assert_called_once() - mocked_background_scheduler.delete_tables.assert_called_once_with("tpch", 1.0) + mock_worker_pool.get_status.assert_called_once() + mock_asynchronous_job_handler.delete_tables.assert_called_once_with("tpch", 1.0) assert type(result) is bool assert not result def test_activates_plugin_with_success(self, database: Database) -> None: """Test entry point for plug-in activation with success.""" - mock_background_scheduler: MagicMock = MagicMock() - mock_background_scheduler.activate_plugin.return_value = True + mock_asynchronous_job_handler: MagicMock = MagicMock() + 
mock_asynchronous_job_handler.activate_plugin.return_value = True mock_get_plugins = MagicMock() mock_get_plugins.return_value = [] - database._background_scheduler = mock_background_scheduler + database._asynchronous_job_handler = mock_asynchronous_job_handler database._get_plugins = mock_get_plugins # type: ignore fake_plugin: str = "Coolputer" result: bool = database.activate_plugin(fake_plugin) - mock_background_scheduler.activate_plugin.assert_called_once_with(fake_plugin) + mock_asynchronous_job_handler.activate_plugin.assert_called_once_with( + fake_plugin + ) assert type(result) is bool assert result def test_activates_plugin_with_no_success(self, database: Database) -> None: """Test entry point for plug-in activation with no success.""" - mock_background_scheduler: MagicMock = MagicMock() - mock_background_scheduler.activate_plugin.return_value = False + mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.activate_plugin.return_value = False mock_get_plugins = MagicMock() mock_get_plugins.return_value = [] - database._background_scheduler = mock_background_scheduler + database._asynchronous_job_handler = mock_asynchronous_job_handler database._get_plugins = mock_get_plugins # type: ignore fake_plugin: str = "Coolputer" result: bool = database.activate_plugin(fake_plugin) - mock_background_scheduler.activate_plugin.assert_called_once_with(fake_plugin) + mock_asynchronous_job_handler.activate_plugin.assert_called_once_with( + fake_plugin + ) assert type(result) is bool assert not result @@ -321,30 +365,30 @@ def test_activates_already_loaded_plugin_with_no_success( self, database: Database ) -> None: """Test entry point for plug-in activation of the already loaded plugin with no success.""" - mock_background_scheduler: MagicMock = MagicMock() - mock_background_scheduler.activate_plugin.return_value = False + mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.activate_plugin.return_value = 
False mock_get_plugins = MagicMock() mock_get_plugins.return_value = ["Coolputer"] - database._background_scheduler = mock_background_scheduler + database._asynchronous_job_handler = mock_asynchronous_job_handler database._get_plugins = mock_get_plugins # type: ignore fake_plugin: str = "Coolputer" result: bool = database.activate_plugin(fake_plugin) - mock_background_scheduler.activate_plugin.assert_not_called() + mock_asynchronous_job_handler.activate_plugin.assert_not_called() assert type(result) is bool assert not result def test_deactivats_plugin_with_success(self, database: Database) -> None: """Test entry point for plug-in deactivation with success.""" - mocked_background_scheduler: MagicMock = MagicMock() - mocked_background_scheduler.deactivate_plugin.return_value = True + mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.deactivate_plugin.return_value = True fake_plugin: str = "Coolputer" - database._background_scheduler = mocked_background_scheduler + database._asynchronous_job_handler = mock_asynchronous_job_handler result: bool = database.deactivate_plugin(fake_plugin) - mocked_background_scheduler.deactivate_plugin.assert_called_once_with( + mock_asynchronous_job_handler.deactivate_plugin.assert_called_once_with( fake_plugin ) assert type(result) is bool @@ -352,14 +396,14 @@ def test_deactivats_plugin_with_success(self, database: Database) -> None: def test_deactivats_plugin_with_no_success(self, database: Database) -> None: """Test entry point for plug-in deactivation with no success.""" - mocked_background_scheduler: MagicMock = MagicMock() - mocked_background_scheduler.deactivate_plugin.return_value = False + mock_asynchronous_job_handler: MagicMock = MagicMock() + mock_asynchronous_job_handler.deactivate_plugin.return_value = False fake_plugin: str = "Coolputer" - database._background_scheduler = mocked_background_scheduler + database._asynchronous_job_handler = mock_asynchronous_job_handler result: bool = 
database.deactivate_plugin(fake_plugin) - mocked_background_scheduler.deactivate_plugin.assert_called_once_with( + mock_asynchronous_job_handler.deactivate_plugin.assert_called_once_with( fake_plugin ) assert type(result) is bool @@ -851,17 +895,17 @@ def test_executes_sql_query_while_database_is_blocked( def test_closes_database(self, database: Database) -> None: """Test closing of database.""" - mocked_worker_pool: MagicMock = MagicMock() - mocked_worker_pool.terminate.return_value = None - mocked_background_scheduler: MagicMock = MagicMock() - mocked_background_scheduler.close.return_value = None + mock_worker_pool: MagicMock = MagicMock() + mock_worker_pool.terminate.return_value = None + mock_continuous_job_handler: MagicMock = MagicMock() + mock_continuous_job_handler.close.return_value = None - database._worker_pool = mocked_worker_pool - database._background_scheduler = mocked_background_scheduler + database._worker_pool = mock_worker_pool + database._continuous_job_handler = mock_continuous_job_handler database.close() - mocked_worker_pool.terminate.assert_called_once() - mocked_background_scheduler.close.assert_called_once() + mock_worker_pool.terminate.assert_called_once() + mock_continuous_job_handler.close.assert_called_once() def test_initializes_influx(self, database: Database) -> None: """Test intialization of the corresponding influx database.""" From c629723cd880ec7d916d8d61602257784fac8a20 Mon Sep 17 00:00:00 2001 From: Alexander-Dubrawski Date: Tue, 8 Sep 2020 12:20:06 +0200 Subject: [PATCH 4/7] Improve docstrings --- hyrisecockpit/database_manager/cli.py | 2 - .../continuous_job_handler.py | 36 +++++++- hyrisecockpit/database_manager/database.py | 90 ++++++++++++++++--- hyrisecockpit/database_manager/manager.py | 3 - tests/database_manager/test_database.py | 4 - .../database_manager/test_database_manager.py | 4 - 6 files changed, 111 insertions(+), 28 deletions(-) diff --git a/hyrisecockpit/database_manager/cli.py 
b/hyrisecockpit/database_manager/cli.py index 9c3028c16..06f5fff98 100644 --- a/hyrisecockpit/database_manager/cli.py +++ b/hyrisecockpit/database_manager/cli.py @@ -2,7 +2,6 @@ from hyrisecockpit.settings import ( DB_MANAGER_LISTENING, DB_MANAGER_PORT, - DEFAULT_TABLES, STORAGE_HOST, STORAGE_PASSWORD, STORAGE_PORT, @@ -22,7 +21,6 @@ def main() -> None: DB_MANAGER_PORT, WORKLOAD_SUB_HOST, WORKLOAD_PUBSUB_PORT, - DEFAULT_TABLES, STORAGE_HOST, STORAGE_PASSWORD, STORAGE_PORT, diff --git a/hyrisecockpit/database_manager/continuous_job_handler.py b/hyrisecockpit/database_manager/continuous_job_handler.py index 18e3fe73f..1ecaa77a3 100644 --- a/hyrisecockpit/database_manager/continuous_job_handler.py +++ b/hyrisecockpit/database_manager/continuous_job_handler.py @@ -22,7 +22,12 @@ class ContinuousJobHandler: - """Continuous Job Handler.""" + """Continuous Job Handler. + + This class manage all jobs that are executed continuously in the background. + The jobs are executed via a background scheduler + (https://apscheduler.readthedocs.io/en/stable/). + """ def __init__( self, @@ -32,7 +37,22 @@ def __init__( storage_connection_factory: StorageConnectionFactory, database_blocked: Value, ): - """Initialize continuous Job Handler.""" + """Initialize continuous Job Handler. + + Args: + connection_factory: An object to create a connection to the Hyrise + database. All connection relevant information (port, host) is + saved in this object. + hyrise_active: Flag stored in a shared memory map. This flag + stores if the Hyrise instance is responsive or not. + worker_pool: An object that manages the queue and task/queue workers. Its + api is used to get the queue length. + storage_connection_factory: An object to create a connection to the Influx + database. All connection relevant information (port, host) is + saved in this object. + database_blocked: Flag stored in a shared memory map. This flag + stores if the Hyrise instance is blocked or not. 
+ """ self._connection_factory = connection_factory self._hyrise_active = hyrise_active self._worker_pool = worker_pool @@ -49,7 +69,11 @@ def __init__( self._init_jobs() def _init_jobs(self) -> None: - """Initialize basic background jobs.""" + """Initialize basic background jobs. + + This function registers all continuous jobs in the background + scheduler. + """ self._ping_hyrise_job = self._scheduler.add_job( func=ping_hyrise, trigger="interval", @@ -140,7 +164,11 @@ def start(self) -> None: self._scheduler.start() def close(self) -> None: - """Close background scheduler.""" + """Close background scheduler. + + Here we remove all jobs from the background scheduler and + shutting it down. + """ self._update_workload_statement_information_job.remove() self._update_system_data_job.remove() self._update_chunks_data_job.remove() diff --git a/hyrisecockpit/database_manager/database.py b/hyrisecockpit/database_manager/database.py index a6070d2ce..64a2ac8e0 100644 --- a/hyrisecockpit/database_manager/database.py +++ b/hyrisecockpit/database_manager/database.py @@ -20,7 +20,14 @@ class Database(object): - """Represents database.""" + """Represents database. + + The database class is a representation of a registered Hyrise instance. + All the communication with the Hyrise is managed by this class and its API. + A worker pool object that manages the queue and task/queue workers to put the + Hyrise under pressure is part of this class. This class also initializes the + influx database for the Hyrise instance. + """ def __init__( self, @@ -32,16 +39,40 @@ def __init__( dbname: str, number_workers: int, workload_publisher_url: str, - default_tables: str, storage_host: str, storage_password: str, storage_port: str, storage_user: str, ) -> None: - """Initialize database object.""" + """Initialize database object. + + Args: + id: A name representation of the Hyrise instance. + user: The user that connects to the Hyrise instance. 
+ password: The password needed to connects to the Hyrise instance. + host: The address of the host machine where the Hyrise instance is running. + port: The port on which the Hyrise instance is running. + dbname: The database name of the Hyrise instance. + number_workers: Number of task workers to put the Hyrise instance under + pressure. Every worker is running in its own process. + workload_publisher_url: URL which is publishing the workload to the workers. + This URL is used by the queue worker to connect to the workload + generator via zeromq. + storage_host: Address in which the influx database is running. + storage_password: Password to connect to the influx database. + storage_port: Port of the influx database. + storage_user: User of the influx database. + + Note: + The attributes user, password, host, port and dbname are the same attributes + that you would use to create a connection to the Hyrise instance with psql. + + Attributes: + _connection_factory: This factory builds a Hyrise connection. + _storage_connection_factory: This factory builds a influx connection. + """ self._id = id self.number_workers: int = number_workers - self._default_tables: str = default_tables self.connection_information: Dict[str, str] = { "host": host, @@ -84,7 +115,15 @@ def __init__( self._continuous_job_handler.start() def _initialize_influx(self) -> None: - """Initialize Influx database.""" + """Initialize Influx database. + + We drop the database inside influx that has the same name like the + database id (Hyrise). We do that to remove all the data from previous + runs. Than we create a new database inside influx with the database id + (Hyrise). After that we create continuous query that the influx is running + every x seconds. For example it will so automatically calculate the throughput + per second. 
+ """ with self._storage_connection_factory.create_cursor() as cursor: cursor.drop_database() cursor.create_database() @@ -164,11 +203,23 @@ def _initialize_influx(self) -> None: ) def get_queue_length(self) -> int: - """Return queue length.""" + """Return queue length. + + We return the number of task that still need to be send to the + Hyrise instance. + """ return self._worker_pool.get_queue_length() def load_data(self, workload: Dict) -> bool: - """Load pre-generated tables.""" + """Load pre-generated tables. + + First we check if the workload and scale factor is valid. + For that workload need to have an equivalent driver and the + driver needs to support the scale factor. Moreover the worker pool + needs to be closed. If one of this requirements is not met the function + will return false. Otherwise the tables will be loaded via the + asynchronous job handler. + """ workload_type = workload["workload_type"] scale_factor = workload["scale_factor"] if workload_type not in self._workload_drivers: @@ -185,7 +236,15 @@ def load_data(self, workload: Dict) -> bool: ) def delete_data(self, workload: Dict) -> bool: - """Delete tables.""" + """Delete tables. + + First we check if the workload and scale factor is valid. + For that workload need to have an equivalent driver and the + driver needs to support the scale factor. Moreover the worker pool + needs to be closed. If one of this requirements is not met the function + will return false. Otherwise the tables will be deleted via the + asynchronous job handler. + """ workload_type = workload["workload_type"] scale_factor = workload["scale_factor"] if workload_type not in self._workload_drivers: @@ -213,15 +272,24 @@ def deactivate_plugin(self, plugin: str) -> bool: return self._asynchronous_job_handler.deactivate_plugin(plugin) def get_database_blocked(self) -> bool: - """Return tables loading flag.""" + """Return database blocked flag. + + The database is blocked if we load/delete tables. 
+ """ return bool(self._database_blocked.value) def get_worker_pool_status(self) -> str: - """Return worker pool status.""" + """Return worker pool status. + + A worker poll can have the status running and closed. + """ return self._worker_pool.get_status() def get_hyrise_active(self) -> bool: - """Return status of hyrise.""" + """Return status of Hyrise. + + This flag defines if the Hyrise instance is responsive or not. + """ return bool(self._hyrise_active.value) def get_loaded_tables_in_database(self) -> List[Dict[str, str]]: diff --git a/hyrisecockpit/database_manager/manager.py b/hyrisecockpit/database_manager/manager.py index c4b388d50..224951288 100644 --- a/hyrisecockpit/database_manager/manager.py +++ b/hyrisecockpit/database_manager/manager.py @@ -31,7 +31,6 @@ def __init__( db_manager_port: str, workload_sub_host: str, workload_pubsub_port: str, - default_tables: str, storage_host: str, storage_password: str, storage_port: str, @@ -40,7 +39,6 @@ def __init__( """Initialize a DatabaseManager.""" self._workload_sub_host = workload_sub_host self._workload_pubsub_port = workload_pubsub_port - self._default_tables = default_tables self._storage_host = storage_host self._storage_password = storage_password self._storage_port = storage_port @@ -116,7 +114,6 @@ def _call_add_database(self, body: Body) -> Response: "tcp://{:s}:{:s}".format( self._workload_sub_host, self._workload_pubsub_port, ), - self._default_tables, self._storage_host, self._storage_password, self._storage_port, diff --git a/tests/database_manager/test_database.py b/tests/database_manager/test_database.py index 3093031e7..e9a16769e 100644 --- a/tests/database_manager/test_database.py +++ b/tests/database_manager/test_database.py @@ -18,7 +18,6 @@ database_name: str = "MongoDB" number_workers: int = 42 workload_publisher_url: str = "lothar matthäus" -default_tables: str = "Watt_ihr_Volt" storage_host: str = "xulfni" storage_password: str = "1234" storage_port: str = "42" @@ -55,7 +54,6 @@ def 
database(self) -> Database: database_name, number_workers, workload_publisher_url, - default_tables, storage_host, storage_password, storage_port, @@ -89,7 +87,6 @@ def test_inintializes_database( database_name, number_workers, workload_publisher_url, - default_tables, storage_host, storage_password, storage_port, @@ -127,7 +124,6 @@ def test_inintializes_database( assert database._id == database_id assert database.number_workers == number_workers - assert database._default_tables == default_tables assert type(database._database_blocked) is ValueType assert type(database._hyrise_active) is ValueType assert database._hyrise_active.value diff --git a/tests/database_manager/test_database_manager.py b/tests/database_manager/test_database_manager.py index c74fbdc1d..969bc2bcb 100644 --- a/tests/database_manager/test_database_manager.py +++ b/tests/database_manager/test_database_manager.py @@ -10,7 +10,6 @@ DB_MANAGER_LISTENING = "listening_host" DB_MANAGER_PORT = "listening_port" -DEFAULT_TABLES = "default_tables" STORAGE_HOST = "storage_host" STORAGE_PASSWORD = "storage_password" STORAGE_PORT = "storage_port" @@ -84,7 +83,6 @@ def database_manager(self) -> DatabaseManager: DB_MANAGER_PORT, WORKLOAD_SUB_HOST, WORKLOAD_PUBSUB_PORT, - DEFAULT_TABLES, STORAGE_HOST, STORAGE_PASSWORD, STORAGE_PORT, @@ -119,7 +117,6 @@ def test_initializes(self, database_manager: DatabaseManager) -> None: assert isinstance(database_manager, DatabaseManager) assert database_manager._workload_sub_host == WORKLOAD_SUB_HOST assert database_manager._workload_pubsub_port == WORKLOAD_PUBSUB_PORT - assert database_manager._default_tables == DEFAULT_TABLES assert database_manager._storage_host == STORAGE_HOST assert database_manager._storage_port == STORAGE_PORT assert database_manager._storage_user == STORAGE_USER @@ -243,7 +240,6 @@ def test_call_add_database( "database_name", 8, "tcp://{:s}:{:s}".format(WORKLOAD_SUB_HOST, WORKLOAD_PUBSUB_PORT,), - DEFAULT_TABLES, STORAGE_HOST, STORAGE_PASSWORD, 
STORAGE_PORT, From c0d7c89231571455eac8adda8d34aff80989a064 Mon Sep 17 00:00:00 2001 From: Alexander-Dubrawski Date: Tue, 8 Sep 2020 12:27:02 +0200 Subject: [PATCH 5/7] Adjust system tests --- system_tests/cli_manager.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/system_tests/cli_manager.py b/system_tests/cli_manager.py index 306ccad0e..a70f3aaac 100644 --- a/system_tests/cli_manager.py +++ b/system_tests/cli_manager.py @@ -3,7 +3,6 @@ from system_tests.settings import ( DB_MANAGER_HOST, DB_MANAGER_PORT, - DEFAULT_TABLES, STORAGE_HOST, STORAGE_PASSWORD, STORAGE_PORT, @@ -21,7 +20,6 @@ def main() -> None: DB_MANAGER_PORT, WORKLOAD_SUB_HOST, WORKLOAD_PUBSUB_PORT, - DEFAULT_TABLES, STORAGE_HOST, STORAGE_PASSWORD, STORAGE_PORT, From f6fc150138020bb4f315307fbf71b80e401c3cdb Mon Sep 17 00:00:00 2001 From: Alexander-Dubrawski Date: Mon, 5 Oct 2020 12:16:46 +0200 Subject: [PATCH 6/7] Fix typos --- .../continuous_job_handler.py | 8 ++--- hyrisecockpit/database_manager/database.py | 31 ++++++++----------- .../test_continous_job_handler.py | 2 +- 3 files changed, 18 insertions(+), 23 deletions(-) diff --git a/hyrisecockpit/database_manager/continuous_job_handler.py b/hyrisecockpit/database_manager/continuous_job_handler.py index 1ecaa77a3..f5a7dc35f 100644 --- a/hyrisecockpit/database_manager/continuous_job_handler.py +++ b/hyrisecockpit/database_manager/continuous_job_handler.py @@ -41,14 +41,14 @@ def __init__( Args: connection_factory: An object to create a connection to the Hyrise - database. All connection relevant information (port, host) is + database. All information relevant for the connection (port, host) is saved in this object. hyrise_active: Flag stored in a shared memory map. This flag - stores if the Hyrise instance is responsive or not. + stores if the Hyrise instance is active or not. worker_pool: An object that manages the queue and task/queue workers. Its api is used to get the queue length. 
storage_connection_factory: An object to create a connection to the Influx - database. All connection relevant information (port, host) is + database. All information relevant for the connection (port, host) is saved in this object. database_blocked: Flag stored in a shared memory map. This flag stores if the Hyrise instance is blocked or not. @@ -167,7 +167,7 @@ def close(self) -> None: """Close background scheduler. Here we remove all jobs from the background scheduler and - shutting it down. + shut it down. """ self._update_workload_statement_information_job.remove() self._update_system_data_job.remove() diff --git a/hyrisecockpit/database_manager/database.py b/hyrisecockpit/database_manager/database.py index b61d5da43..3269721b1 100644 --- a/hyrisecockpit/database_manager/database.py +++ b/hyrisecockpit/database_manager/database.py @@ -25,8 +25,8 @@ class Database(object): The database class is a representation of a registered Hyrise instance. All the communication with the Hyrise is managed by this class and its API. A worker pool object that manages the queue and task/queue workers to put the - Hyrise under pressure is part of this class. This class also initializes the - influx database for the Hyrise instance. + Hyrise under pressure is part of this class. This class also initializes an + influx database for this Hyrise instance. """ def __init__( @@ -47,18 +47,18 @@ def __init__( """Initialize database object. Args: - id: A name representation of the Hyrise instance. + id: A string identifying the Hyrise instance. For example hyrise_1. user: The user that connects to the Hyrise instance. - password: The password needed to connects to the Hyrise instance. + password: The password needed to connect to the Hyrise instance. host: The address of the host machine where the Hyrise instance is running. port: The port on which the Hyrise instance is running. - dbname: The database name of the Hyrise instance. + dbname: The database name of the Hyrise instance. 
For example psql. number_workers: Number of task workers to put the Hyrise instance under - pressure. Every worker is running in its own process. + load. Every worker is running in its own process. workload_publisher_url: URL which is publishing the workload to the workers. This URL is used by the queue worker to connect to the workload generator via zeromq. - storage_host: Address in which the influx database is running. + storage_host: Address of the influx database instance. storage_password: Password to connect to the influx database. storage_port: Port of the influx database. storage_user: User of the influx database. @@ -127,9 +127,9 @@ def _initialize_influx(self) -> None: We drop the database inside influx that has the same name like the database id (Hyrise). We do that to remove all the data from previous - runs. Than we create a new database inside influx with the database id - (Hyrise). After that we create continuous query that the influx is running - every x seconds. For example it will so automatically calculate the throughput + runs. Then we create a new database inside influx with the database id + (Hyrise). After that we create a continuous query that the influx is running + every x seconds. For example, to automatically calculate the throughput per second. """ with self._storage_connection_factory.create_cursor() as cursor: @@ -221,8 +221,8 @@ def get_queue_length(self) -> int: def load_data(self, workload: Dict) -> bool: """Load pre-generated tables. - First we check if the workload and scale factor is valid. - For that workload need to have an equivalent driver and the + First, we check if the workload and scale factor is valid. + The workload needs to have an equivalent driver and the driver needs to support the scale factor. Moreover the worker pool needs to be closed. If one of this requirements is not met the function will return false. 
+        Same procedure as in load_data.
+        needs to be closed. If one of these requirements is not met, the function