Skip to content

Commit

Permalink
Update tests
Browse files Browse the repository at this point in the history
  • Loading branch information
stefanDeveloper committed Oct 21, 2024
1 parent 1eac7cf commit 242d77e
Showing 1 changed file with 209 additions and 4 deletions.
213 changes: 209 additions & 4 deletions tests/test_inspector.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@
from datetime import datetime, timedelta
from unittest.mock import MagicMock, patch
import numpy as np
import json

from streamad.model import ZScoreDetector, RShashDetector
from src.base import Batch
from src.inspector.inspector import Inspector, main

Expand Down Expand Up @@ -366,6 +368,84 @@ def test_inspect_univariate(
sut.inspect()
self.assertEqual([0, 0], sut.anomalies)

@patch("src.inspector.inspector.logger")
@patch("src.inspector.inspector.KafkaProduceHandler")
@patch("src.inspector.inspector.KafkaConsumeHandler")
@patch(
    "src.inspector.inspector.MODELS",
    [
        {
            "model": "ZScoreDetector",
            "module": "streamad.model",
            "model_args": {"window_len": 10},
        }
    ],
)
@patch("src.inspector.inspector.TIME_TYPE", "ms")
@patch("src.inspector.inspector.TIME_RANGE", 1)
def test_inspect_univariate_window_len(
    self, mock_kafka_consume_handler, mock_produce_handler, mock_logger
):
    """Univariate inspection with an explicit ``window_len`` model argument.

    NOTE(review): renamed from ``test_inspect_univariate`` — a method of
    that name already exists in this class (see the surrounding diff
    context), and a duplicate definition would silently shadow the earlier
    test so that only one of the two ever runs.
    """
    test_batch = get_batch(None)
    test_batch.begin_timestamp = datetime.now()
    test_batch.end_timestamp = datetime.now() + timedelta(0, 0, 2)
    # Copy so the module-level DEFAULT_DATA dict is not mutated across tests.
    data = DEFAULT_DATA.copy()
    data["timestamp"] = datetime.strftime(
        test_batch.begin_timestamp + timedelta(0, 0, 1), TIMESTAMP_FORMAT
    )
    test_batch.data = [data]
    mock_kafka_consume_handler_instance = MagicMock()
    mock_kafka_consume_handler.return_value = mock_kafka_consume_handler_instance
    mock_kafka_consume_handler_instance.consume_and_return_object.return_value = (
        "test",
        test_batch,
    )
    mock_produce_handler_instance = MagicMock()
    mock_produce_handler.return_value = mock_produce_handler_instance

    sut = Inspector()
    sut.get_and_fill_data()
    sut.inspect()
    # The detector must have produced scores, i.e. not the untouched default.
    self.assertNotEqual([None, None], sut.anomalies)

@patch("src.inspector.inspector.logger")
@patch("src.inspector.inspector.KafkaProduceHandler")
@patch("src.inspector.inspector.KafkaConsumeHandler")
@patch(
    "src.inspector.inspector.MODELS",
    [
        {"model": "ZScoreDetector", "module": "streamad.model", "model_args": {}},
        {"model": "KNNDetector", "module": "streamad.model", "model_args": {}},
    ],
)
@patch("src.inspector.inspector.TIME_TYPE", "ms")
@patch("src.inspector.inspector.TIME_RANGE", 1)
def test_inspect_univariate_two_models(
    self, mock_kafka_consume_handler, mock_produce_handler, mock_logger
):
    """When MODELS lists several univariate models, only the first is used.

    Verifies that ``sut.model`` ends up as a ``ZScoreDetector`` (the first
    entry) and that inspection still produces a score list.
    """
    test_batch = get_batch(None)
    test_batch.begin_timestamp = datetime.now()
    test_batch.end_timestamp = datetime.now() + timedelta(0, 0, 2)
    # Copy so the module-level DEFAULT_DATA dict is not mutated across tests.
    data = DEFAULT_DATA.copy()
    data["timestamp"] = datetime.strftime(
        test_batch.begin_timestamp + timedelta(0, 0, 1), TIMESTAMP_FORMAT
    )
    test_batch.data = [data]
    mock_kafka_consume_handler_instance = MagicMock()
    mock_kafka_consume_handler.return_value = mock_kafka_consume_handler_instance
    mock_kafka_consume_handler_instance.consume_and_return_object.return_value = (
        "test",
        test_batch,
    )
    mock_produce_handler_instance = MagicMock()
    mock_produce_handler.return_value = mock_produce_handler_instance

    sut = Inspector()
    sut.get_and_fill_data()
    sut.inspect()
    self.assertEqual([0, 0], sut.anomalies)
    # Only the first configured model must be instantiated.
    self.assertTrue(isinstance(sut.model, ZScoreDetector))

@patch("src.inspector.inspector.logger")
@patch("src.inspector.inspector.KafkaProduceHandler")
@patch("src.inspector.inspector.KafkaConsumeHandler")
Expand Down Expand Up @@ -399,6 +479,43 @@ def test_inspect_multivariate(
sut.inspect()
self.assertEqual([0, 0], sut.anomalies)

@patch("src.inspector.inspector.logger")
@patch("src.inspector.inspector.KafkaProduceHandler")
@patch("src.inspector.inspector.KafkaConsumeHandler")
@patch(
    "src.inspector.inspector.MODELS",
    [
        {"model": "RShashDetector", "module": "streamad.model", "model_args": {}},
        {"model": "xStreamDetector", "module": "streamad.model", "model_args": {}},
    ],
)
@patch("src.inspector.inspector.MODE", "multivariate")
def test_inspect_multivariate_two_models(
    self, mock_kafka_consume_handler, mock_produce_handler, mock_logger
):
    """When MODELS lists several multivariate models, only the first is used.

    Verifies that ``sut.model`` ends up as an ``RShashDetector`` (the first
    entry) and that inspection produces zero anomaly scores for this input.
    """
    test_batch = get_batch(None)
    test_batch.begin_timestamp = datetime.now()
    test_batch.end_timestamp = datetime.now() + timedelta(0, 0, 2)
    # Copy so the module-level DEFAULT_DATA dict is not mutated across tests.
    data = DEFAULT_DATA.copy()
    data["timestamp"] = datetime.strftime(
        test_batch.begin_timestamp + timedelta(0, 0, 1), TIMESTAMP_FORMAT
    )
    test_batch.data = [data]
    mock_kafka_consume_handler_instance = MagicMock()
    mock_kafka_consume_handler.return_value = mock_kafka_consume_handler_instance
    mock_kafka_consume_handler_instance.consume_and_return_object.return_value = (
        "test",
        test_batch,
    )
    mock_produce_handler_instance = MagicMock()
    mock_produce_handler.return_value = mock_produce_handler_instance

    sut = Inspector()
    sut.get_and_fill_data()
    sut.inspect()
    self.assertEqual([0, 0], sut.anomalies)
    # Only the first configured model must be instantiated.
    self.assertTrue(isinstance(sut.model, RShashDetector))

@patch("src.inspector.inspector.logger")
@patch("src.inspector.inspector.KafkaProduceHandler")
@patch("src.inspector.inspector.KafkaConsumeHandler")
Expand Down Expand Up @@ -449,8 +566,16 @@ def test_inspect_ensemble(
@patch(
"src.inspector.inspector.MODELS",
[
{"model": "KNNDetector", "module": "streamad.model", "model_args": {}},
{"model": "SpotDetector", "module": "streamad.model", "model_args": {}},
{
"model": "KNNDetector",
"module": "streamad.model",
"model_args": {"window_len": 10},
},
{
"model": "SpotDetector",
"module": "streamad.model",
"model_args": {"window_len": 10},
},
],
)
@patch(
Expand All @@ -462,7 +587,7 @@ def test_inspect_ensemble(
},
)
@patch("src.inspector.inspector.MODE", "ensemble")
def test_inspect_ensemble_with_ts(
def test_inspect_ensemble_window_len(
self, mock_kafka_consume_handler, mock_produce_handler, mock_logger
):
test_batch = get_batch(None)
Expand All @@ -485,7 +610,51 @@ def test_inspect_ensemble_with_ts(
sut = Inspector()
sut.get_and_fill_data()
sut.inspect()
self.assertEqual([0, 0], sut.anomalies)
self.assertNotEqual([None, None], sut.anomalies)

@patch("src.inspector.inspector.logger")
@patch("src.inspector.inspector.KafkaProduceHandler")
@patch("src.inspector.inspector.KafkaConsumeHandler")
@patch(
    "src.inspector.inspector.MODELS",
    [
        {"model": "RShashDetector", "module": "streamad.model", "model_args": {}},
        {"model": "SpotDetector", "module": "streamad.model", "model_args": {}},
    ],
)
@patch(
    "src.inspector.inspector.ENSEMBLE",
    {
        "model": "WeightEnsemble",
        "module": "streamad.process",
        "model_args": {"ensemble_weights": [0.6, 0.4]},
    },
)
@patch("src.inspector.inspector.MODE", "ensemble")
def test_inspect_ensemble_invalid(
    self, mock_kafka_consume_handler, mock_produce_handler, mock_logger
):
    """An ensemble mixing multivariate and univariate detectors must fail.

    ``RShashDetector`` + ``SpotDetector`` is an invalid combination for
    ensemble mode, so ``inspect()`` is expected to raise
    ``NotImplementedError``.
    """
    test_batch = get_batch(None)
    test_batch.begin_timestamp = datetime.now()
    test_batch.end_timestamp = datetime.now() + timedelta(0, 0, 2)
    # Copy so the module-level DEFAULT_DATA dict is not mutated across tests.
    data = DEFAULT_DATA.copy()
    data["timestamp"] = datetime.strftime(
        test_batch.begin_timestamp + timedelta(0, 0, 1), TIMESTAMP_FORMAT
    )
    test_batch.data = [data]
    mock_kafka_consume_handler_instance = MagicMock()
    mock_kafka_consume_handler.return_value = mock_kafka_consume_handler_instance
    mock_kafka_consume_handler_instance.consume_and_return_object.return_value = (
        "test",
        test_batch,
    )
    mock_produce_handler_instance = MagicMock()
    mock_produce_handler.return_value = mock_produce_handler_instance

    sut = Inspector()
    sut.get_and_fill_data()
    with self.assertRaises(NotImplementedError):
        sut.inspect()

@patch("src.inspector.inspector.logger")
@patch("src.inspector.inspector.KafkaProduceHandler")
Expand Down Expand Up @@ -560,6 +729,42 @@ def test_invalid_mode(self, mock_kafka_consume_handler, mock_produce_handler):
sut.inspect()


class TestSend(unittest.TestCase):
    """Tests for Inspector.send_data()."""

    @patch("src.inspector.inspector.KafkaProduceHandler")
    @patch("src.inspector.inspector.KafkaConsumeHandler")
    @patch("src.inspector.inspector.SCORE_THRESHOLD", 0.1)
    @patch("src.inspector.inspector.ANOMALY_THRESHOLD", 0.01)
    def test_send(self, mock_kafka_consume_handler, mock_produce_handler):
        """Anomalous scores above the thresholds trigger exactly one produce.

        Verifies the payload shape (timestamps + original messages as JSON)
        and the target topic/key of the Kafka send.
        """
        mock_kafka_consume_handler_instance = MagicMock()
        mock_kafka_consume_handler.return_value = mock_kafka_consume_handler_instance
        mock_produce_handler_instance = MagicMock()
        mock_produce_handler.return_value = mock_produce_handler_instance

        sut = Inspector()
        # Both scores exceed SCORE_THRESHOLD (0.1), so the batch is anomalous.
        sut.anomalies = [0.9, 0.9]
        sut.X = np.array([[0.0], [0.0]])
        sut.begin_timestamp = datetime.now()
        sut.end_timestamp = datetime.now() + timedelta(0, 0, 2)
        # Copy so the module-level DEFAULT_DATA dict is not mutated across tests.
        data = DEFAULT_DATA.copy()
        data["timestamp"] = datetime.strftime(
            sut.begin_timestamp + timedelta(0, 0, 1), TIMESTAMP_FORMAT
        )
        sut.messages = [data]
        sut.send_data()

        mock_produce_handler_instance.send.assert_called_once_with(
            topic="Detector",
            data=json.dumps(
                {
                    "begin_timestamp": sut.begin_timestamp.strftime(TIMESTAMP_FORMAT),
                    "end_timestamp": sut.end_timestamp.strftime(TIMESTAMP_FORMAT),
                    "data": [data],
                }
            ),
            key="192.168.0.167",
        )


class TestMainFunction(unittest.TestCase):
@patch("src.inspector.inspector.logger")
@patch("src.inspector.inspector.Inspector")
Expand Down

0 comments on commit 242d77e

Please sign in to comment.