From efd5a2f09e1846f088c34fddd7a51848d0c63e78 Mon Sep 17 00:00:00 2001
From: Szubie
Date: Sat, 28 Sep 2024 21:29:49 +0100
Subject: [PATCH] Update tests to use data from fixtures instead of relying on
 sklearn

---
 tests/conftest.py                            | 170 ++++++++++++++++++
 tests/data/test_knn.py                       |  10 +-
 tests/data/test_triplet_generator.py         |   4 +-
 tests/integration/test_iris.py               |  16 +-
 tests/integration/test_neighbour_matrix.py   |  11 +-
 .../integration/test_semi-supervised_iris.py |  50 ++----
 tests/integration/test_supervised.py         |  81 +++------
 tests/nn/test_callbacks.py                   |  22 +--
 tests/nn/test_losses.py                      |   8 +-
 tests/test_model_saving.py                   |  24 +--
 10 files changed, 235 insertions(+), 161 deletions(-)

diff --git a/tests/conftest.py b/tests/conftest.py
index 7af0823..6591881 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,3 +1,4 @@
+import numpy as np
 import pytest
 
 from tensorflow.keras import backend as K
@@ -11,3 +12,172 @@ def clear_session_after_test():
     yield
     if K.backend() == 'tensorflow' or K.backend() == 'cntk':
         K.clear_session()
+
+@pytest.fixture(scope="function")
+def X():
+    data = np.array(
+        [[5.1, 3.5, 1.4, 0.2],
+         [4.9, 3. , 1.4, 0.2],
+         [4.7, 3.2, 1.3, 0.2],
+         [4.6, 3.1, 1.5, 0.2],
+         [5. , 3.6, 1.4, 0.2],
+         [5.4, 3.9, 1.7, 0.4],
+         [4.6, 3.4, 1.4, 0.3],
+         [5. , 3.4, 1.5, 0.2],
+         [4.4, 2.9, 1.4, 0.2],
+         [4.9, 3.1, 1.5, 0.1],
+         [5.4, 3.7, 1.5, 0.2],
+         [4.8, 3.4, 1.6, 0.2],
+         [4.8, 3. , 1.4, 0.1],
+         [4.3, 3. , 1.1, 0.1],
+         [5.8, 4. , 1.2, 0.2],
+         [5.7, 4.4, 1.5, 0.4],
+         [5.4, 3.9, 1.3, 0.4],
+         [5.1, 3.5, 1.4, 0.3],
+         [5.7, 3.8, 1.7, 0.3],
+         [5.1, 3.8, 1.5, 0.3],
+         [5.4, 3.4, 1.7, 0.2],
+         [5.1, 3.7, 1.5, 0.4],
+         [4.6, 3.6, 1. , 0.2],
+         [5.1, 3.3, 1.7, 0.5],
+         [4.8, 3.4, 1.9, 0.2],
+         [5. , 3. , 1.6, 0.2],
+         [5. , 3.4, 1.6, 0.4],
+         [5.2, 3.5, 1.5, 0.2],
+         [5.2, 3.4, 1.4, 0.2],
+         [4.7, 3.2, 1.6, 0.2],
+         [4.8, 3.1, 1.6, 0.2],
+         [5.4, 3.4, 1.5, 0.4],
+         [5.2, 4.1, 1.5, 0.1],
+         [5.5, 4.2, 1.4, 0.2],
+         [4.9, 3.1, 1.5, 0.2],
+         [5. , 3.2, 1.2, 0.2],
+         [5.5, 3.5, 1.3, 0.2],
+         [4.9, 3.6, 1.4, 0.1],
+         [4.4, 3. , 1.3, 0.2],
+         [5.1, 3.4, 1.5, 0.2],
+         [5. , 3.5, 1.3, 0.3],
+         [4.5, 2.3, 1.3, 0.3],
+         [4.4, 3.2, 1.3, 0.2],
+         [5. , 3.5, 1.6, 0.6],
+         [5.1, 3.8, 1.9, 0.4],
+         [4.8, 3. , 1.4, 0.3],
+         [5.1, 3.8, 1.6, 0.2],
+         [4.6, 3.2, 1.4, 0.2],
+         [5.3, 3.7, 1.5, 0.2],
+         [5. , 3.3, 1.4, 0.2],
+         [7. , 3.2, 4.7, 1.4],
+         [6.4, 3.2, 4.5, 1.5],
+         [6.9, 3.1, 4.9, 1.5],
+         [5.5, 2.3, 4. , 1.3],
+         [6.5, 2.8, 4.6, 1.5],
+         [5.7, 2.8, 4.5, 1.3],
+         [6.3, 3.3, 4.7, 1.6],
+         [4.9, 2.4, 3.3, 1. ],
+         [6.6, 2.9, 4.6, 1.3],
+         [5.2, 2.7, 3.9, 1.4],
+         [5. , 2. , 3.5, 1. ],
+         [5.9, 3. , 4.2, 1.5],
+         [6. , 2.2, 4. , 1. ],
+         [6.1, 2.9, 4.7, 1.4],
+         [5.6, 2.9, 3.6, 1.3],
+         [6.7, 3.1, 4.4, 1.4],
+         [5.6, 3. , 4.5, 1.5],
+         [5.8, 2.7, 4.1, 1. ],
+         [6.2, 2.2, 4.5, 1.5],
+         [5.6, 2.5, 3.9, 1.1],
+         [5.9, 3.2, 4.8, 1.8],
+         [6.1, 2.8, 4. , 1.3],
+         [6.3, 2.5, 4.9, 1.5],
+         [6.1, 2.8, 4.7, 1.2],
+         [6.4, 2.9, 4.3, 1.3],
+         [6.6, 3. , 4.4, 1.4],
+         [6.8, 2.8, 4.8, 1.4],
+         [6.7, 3. , 5. , 1.7],
+         [6. , 2.9, 4.5, 1.5],
+         [5.7, 2.6, 3.5, 1. ],
+         [5.5, 2.4, 3.8, 1.1],
+         [5.5, 2.4, 3.7, 1. ],
+         [5.8, 2.7, 3.9, 1.2],
+         [6. , 2.7, 5.1, 1.6],
+         [5.4, 3. , 4.5, 1.5],
+         [6. , 3.4, 4.5, 1.6],
+         [6.7, 3.1, 4.7, 1.5],
+         [6.3, 2.3, 4.4, 1.3],
+         [5.6, 3. , 4.1, 1.3],
+         [5.5, 2.5, 4. , 1.3],
+         [5.5, 2.6, 4.4, 1.2],
+         [6.1, 3. , 4.6, 1.4],
+         [5.8, 2.6, 4. , 1.2],
+         [5. , 2.3, 3.3, 1. ],
+         [5.6, 2.7, 4.2, 1.3],
+         [5.7, 3. , 4.2, 1.2],
+         [5.7, 2.9, 4.2, 1.3],
+         [6.2, 2.9, 4.3, 1.3],
+         [5.1, 2.5, 3. , 1.1],
+         [5.7, 2.8, 4.1, 1.3],
+         [6.3, 3.3, 6. , 2.5],
+         [5.8, 2.7, 5.1, 1.9],
+         [7.1, 3. , 5.9, 2.1],
+         [6.3, 2.9, 5.6, 1.8],
+         [6.5, 3. , 5.8, 2.2],
+         [7.6, 3. , 6.6, 2.1],
+         [4.9, 2.5, 4.5, 1.7],
+         [7.3, 2.9, 6.3, 1.8],
+         [6.7, 2.5, 5.8, 1.8],
+         [7.2, 3.6, 6.1, 2.5],
+         [6.5, 3.2, 5.1, 2. ],
+         [6.4, 2.7, 5.3, 1.9],
+         [6.8, 3. , 5.5, 2.1],
+         [5.7, 2.5, 5. , 2. ],
+         [5.8, 2.8, 5.1, 2.4],
+         [6.4, 3.2, 5.3, 2.3],
+         [6.5, 3. , 5.5, 1.8],
+         [7.7, 3.8, 6.7, 2.2],
+         [7.7, 2.6, 6.9, 2.3],
+         [6. , 2.2, 5. , 1.5],
+         [6.9, 3.2, 5.7, 2.3],
+         [5.6, 2.8, 4.9, 2. ],
+         [7.7, 2.8, 6.7, 2. ],
+         [6.3, 2.7, 4.9, 1.8],
+         [6.7, 3.3, 5.7, 2.1],
+         [7.2, 3.2, 6. , 1.8],
+         [6.2, 2.8, 4.8, 1.8],
+         [6.1, 3. , 4.9, 1.8],
+         [6.4, 2.8, 5.6, 2.1],
+         [7.2, 3. , 5.8, 1.6],
+         [7.4, 2.8, 6.1, 1.9],
+         [7.9, 3.8, 6.4, 2. ],
+         [6.4, 2.8, 5.6, 2.2],
+         [6.3, 2.8, 5.1, 1.5],
+         [6.1, 2.6, 5.6, 1.4],
+         [7.7, 3. , 6.1, 2.3],
+         [6.3, 3.4, 5.6, 2.4],
+         [6.4, 3.1, 5.5, 1.8],
+         [6. , 3. , 4.8, 1.8],
+         [6.9, 3.1, 5.4, 2.1],
+         [6.7, 3.1, 5.6, 2.4],
+         [6.9, 3.1, 5.1, 2.3],
+         [5.8, 2.7, 5.1, 1.9],
+         [6.8, 3.2, 5.9, 2.3],
+         [6.7, 3.3, 5.7, 2.5],
+         [6.7, 3. , 5.2, 2.3],
+         [6.3, 2.5, 5. , 1.9],
+         [6.5, 3. , 5.2, 2. ],
+         [6.2, 3.4, 5.4, 2.3],
+         [5.9, 3. , 5.1, 1.8]]
+    )
+    return data
+
+@pytest.fixture(scope="function")
+def Y():
+    target = np.array(
+        [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+         0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+         0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+         1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+         1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+         2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+         2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]
+    )
+    return target
diff --git a/tests/data/test_knn.py b/tests/data/test_knn.py
index 44ad48a..973e941 100644
--- a/tests/data/test_knn.py
+++ b/tests/data/test_knn.py
@@ -50,13 +50,10 @@ def test_dense_annoy_index(annoy_index_file):
 
     loaded_index.unload()
 
-def test_knn_retrieval():
+def test_knn_retrieval(X):
     annoy_index_filepath = 'tests/data/.test-annoy-index.index'
     expected_neighbour_list = np.load('tests/data/test_knn_k3.npy')
 
-    iris = datasets.load_iris()
-    X = iris.data
-
     k = 3
     search_k = -1
 
@@ -79,13 +76,10 @@ def test_knn_matrix_construction_params(annoy_index_file):
     for original_row, loaded_row in zip(index, loaded_index):
         assert original_row == loaded_row
 
-def test_knn_retrieval_non_verbose():
+def test_knn_retrieval_non_verbose(X):
     annoy_index_filepath = 'tests/data/.test-annoy-index.index'
     expected_neighbour_list = np.load('tests/data/test_knn_k3.npy')
 
-    iris = datasets.load_iris()
-    X = iris.data
-
     k = 3
     search_k = -1
 
diff --git a/tests/data/test_triplet_generator.py b/tests/data/test_triplet_generator.py
index 4906545..303de91 100644
--- a/tests/data/test_triplet_generator.py
+++ b/tests/data/test_triplet_generator.py
@@ -7,11 +7,9 @@
 from ivis.data.generators import UnsupervisedTripletGenerator
 
 
-def test_UnsupervisedTripletGenerator():
+def test_UnsupervisedTripletGenerator(X):
     neighbour_list = np.load('tests/data/test_knn_k3.npy')
-    iris = datasets.load_iris()
-    X = iris.data
 
     batch_size = 32
     data_generator = UnsupervisedTripletGenerator(X, neighbour_list,
diff --git a/tests/integration/test_iris.py b/tests/integration/test_iris.py
index 5fdb4f5..9eae24d 100644
--- a/tests/integration/test_iris.py
+++ b/tests/integration/test_iris.py
@@ -2,24 +2,16 @@
 from sklearn import datasets
 
 
-def test_iris_embedding():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
-
+def test_iris_embedding(X):
     ivis_iris = Ivis(epochs=5)
     ivis_iris.k = 15
     ivis_iris.batch_size = 16
 
-    y_pred_iris = ivis_iris.fit_transform(x)
-
-def test_1d_iris_embedding():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
+    y_pred_iris = ivis_iris.fit_transform(X)
 
+def test_1d_iris_embedding(X):
     ivis_iris = Ivis(epochs=5, embedding_dims=1)
     ivis_iris.k = 15
     ivis_iris.batch_size = 16
 
-    y_pred_iris = ivis_iris.fit_transform(x)
+    y_pred_iris = ivis_iris.fit_transform(X)
diff --git a/tests/integration/test_neighbour_matrix.py b/tests/integration/test_neighbour_matrix.py
index b216c09..5a06985 100644
--- a/tests/integration/test_neighbour_matrix.py
+++ b/tests/integration/test_neighbour_matrix.py
@@ -3,16 +3,13 @@
 from ivis import Ivis
 
 
-def test_custom_ndarray_neighbour_matrix():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
+def test_custom_ndarray_neighbour_matrix(X, Y):
 
-    class_indicies = {label: np.argwhere(y == label).ravel() for label in np.unique(y)}
-    neighbour_matrix = np.array([class_indicies[label] for label in y])
+    class_indicies = {label: np.argwhere(Y == label).ravel() for label in np.unique(Y)}
+    neighbour_matrix = np.array([class_indicies[label] for label in Y])
 
     ivis_iris = Ivis(epochs=5, neighbour_matrix=neighbour_matrix)
     ivis_iris.k = 15
     ivis_iris.batch_size = 16
 
-    y_pred_iris = ivis_iris.fit_transform(x)
+    y_pred_iris = ivis_iris.fit_transform(X)
diff --git a/tests/integration/test_semi-supervised_iris.py b/tests/integration/test_semi-supervised_iris.py
index 2d4cc2e..946092f 100644
--- a/tests/integration/test_semi-supervised_iris.py
+++ b/tests/integration/test_semi-supervised_iris.py
@@ -4,69 +4,55 @@
 from ivis import Ivis
 
 
-def test_iris_embedding():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
-    mask = np.random.choice(range(len(y)), size=len(y) // 2, replace=False)
-    y[mask] = -1
+def test_iris_embedding(X, Y):
+    mask = np.random.choice(range(len(Y)), size=len(Y) // 2, replace=False)
+    Y[mask] = -1
 
     ivis_iris = Ivis(epochs=5)
     ivis_iris.k = 15
     ivis_iris.batch_size = 16
 
-    y_pred_iris = ivis_iris.fit_transform(x, y)
+    y_pred_iris = ivis_iris.fit_transform(X, Y)
 
 
-def test_correctly_indexed_semi_supervised_classificaton_classes():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
+def test_correctly_indexed_semi_supervised_classificaton_classes(X, Y):
 
     # Mark points as unlabeled
-    mask = np.random.choice(range(len(y)), size=len(y) // 2, replace=False)
-    y[mask] = -1
+    mask = np.random.choice(range(len(Y)), size=len(Y) // 2, replace=False)
+    Y[mask] = -1
 
     supervision_metric = 'sparse_categorical_crossentropy'
     ivis_iris = Ivis(k=15, batch_size=16, epochs=5,
                      supervision_metric=supervision_metric)
 
-    embeddings = ivis_iris.fit_transform(x, y)
-
-def test_non_zero_indexed_semi_supervised_classificaton_classes():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
+    embeddings = ivis_iris.fit_transform(X, Y)
 
+def test_non_zero_indexed_semi_supervised_classificaton_classes(X, Y):
     # Make labels non-zero indexed
-    y = y + 1
+    Y = Y + 1
 
     # Mark points as unlabeled
-    mask = np.random.choice(range(len(y)), size=len(y) // 2, replace=False)
-    y[mask] = -1
+    mask = np.random.choice(range(len(Y)), size=len(Y) // 2, replace=False)
+    Y[mask] = -1
 
     supervision_metric = 'sparse_categorical_crossentropy'
     ivis_iris = Ivis(k=15, batch_size=16, epochs=5,
                      supervision_metric=supervision_metric)
 
     with pytest.raises(ValueError):
-        embeddings = ivis_iris.fit_transform(x, y)
-
+        embeddings = ivis_iris.fit_transform(X, Y)
 
-def test_non_consecutive_indexed_semi_supervised_classificaton_classes():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
+def test_non_consecutive_indexed_semi_supervised_classificaton_classes(X, Y):
     # Make labels non-consecutive indexed
-    y[y == max(y)] = max(y) + 1
+    Y[Y == max(Y)] = max(Y) + 1
 
     # Mark points as unlabeled
-    mask = np.random.choice(range(len(y)), size=len(y) // 2, replace=False)
-    y[mask] = -1
+    mask = np.random.choice(range(len(Y)), size=len(Y) // 2, replace=False)
+    Y[mask] = -1
 
     supervision_metric = 'sparse_categorical_crossentropy'
     ivis_iris = Ivis(k=15, batch_size=16, epochs=5,
                      supervision_metric=supervision_metric)
 
     with pytest.raises(ValueError):
-        embeddings = ivis_iris.fit_transform(x, y)
+        embeddings = ivis_iris.fit_transform(X, Y)
diff --git a/tests/integration/test_supervised.py b/tests/integration/test_supervised.py
index dfd6d24..f439d67 100644
--- a/tests/integration/test_supervised.py
+++ b/tests/integration/test_supervised.py
@@ -7,46 +7,35 @@
 from ivis import Ivis
 
 
-def test_1d_supervied_iris_embedding():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
-
+def test_1d_supervied_iris_embedding(X, Y):
    ivis_iris = Ivis(epochs=2, embedding_dims=1)
     ivis_iris.k = 15
     ivis_iris.batch_size = 16
 
-    y_pred_iris = ivis_iris.fit_transform(x, y)
+    y_pred_iris = ivis_iris.fit_transform(X, Y)
 
 
-def test_score_samples_unsupervised():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
+def test_score_samples_unsupervised(X):
 
     ivis_iris = Ivis(k=15, batch_size=16, epochs=2)
-    embeddings = ivis_iris.fit_transform(x)
+    embeddings = ivis_iris.fit_transform(X)
 
     # Unsupervised model cannot classify
     with pytest.raises(Exception):
-        y_pred = ivis_iris.score_samples(x)
-
+        y_pred = ivis_iris.score_samples(X)
 
-def test_score_samples():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
+def test_score_samples(X, Y):
 
     supervision_metric = 'sparse_categorical_crossentropy'
     ivis_iris = Ivis(k=15, batch_size=16, epochs=2,
                      supervision_metric=supervision_metric)
-    embeddings = ivis_iris.fit_transform(x, y)
-    y_pred = ivis_iris.score_samples(x)
+    embeddings = ivis_iris.fit_transform(X, Y)
+    y_pred = ivis_iris.score_samples(X)
 
     # Softmax probabilities add to one, correct shape
     assert np.sum(y_pred, axis=-1) == pytest.approx(1, 0.01)
-    assert y_pred.shape[0] == x.shape[0]
-    assert y_pred.shape[1] == len(np.unique(y))
+    assert y_pred.shape[0] == X.shape[0]
+    assert y_pred.shape[1] == len(np.unique(Y))
 
     # Check that loss function and activation are correct
     loss_name = ivis_iris.model_.loss['supervised'].__name__
@@ -54,84 +43,64 @@ def test_score_samples():
     assert ivis_iris.model_.layers[-1].activation.__name__ == 'softmax'
 
 
-def test_correctly_indexed_classificaton_classes():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
-
+def test_correctly_indexed_classificaton_classes(X, Y):
     supervision_metric = 'sparse_categorical_crossentropy'
     ivis_iris = Ivis(k=15, batch_size=16, epochs=2,
                      supervision_metric=supervision_metric)
-    embeddings = ivis_iris.fit_transform(x, y)
-
+    embeddings = ivis_iris.fit_transform(X, Y)
 
-def test_non_zero_indexed_classificaton_classes():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
+def test_non_zero_indexed_classificaton_classes(X, Y):
     # Make labels non-zero indexed
-    y = y + 1
+    Y = Y + 1
 
     supervision_metric = 'sparse_categorical_crossentropy'
     ivis_iris = Ivis(k=15, batch_size=16, epochs=2,
                      supervision_metric=supervision_metric)
 
     with pytest.raises(ValueError):
-        embeddings = ivis_iris.fit_transform(x, y)
+        embeddings = ivis_iris.fit_transform(X, Y)
 
 
-def test_non_consecutive_indexed_classificaton_classes():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
-
+def test_non_consecutive_indexed_classificaton_classes(X, Y):
     # Make labels non-consecutive indexed
-    y[y == max(y)] = max(y) + 1
+    Y[Y == max(Y)] = max(Y) + 1
 
     supervision_metric = 'sparse_categorical_crossentropy'
     ivis_iris = Ivis(k=15, batch_size=16, epochs=2,
                      supervision_metric=supervision_metric)
 
     with pytest.raises(ValueError):
-        embeddings = ivis_iris.fit_transform(x, y)
-
+        embeddings = ivis_iris.fit_transform(X, Y)
 
-def test_invalid_metric():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
+def test_invalid_metric(X, Y):
 
     supervision_metric = 'invalid_loss_function'
     ivis_iris = Ivis(k=15, batch_size=16, epochs=2,
                      supervision_metric=supervision_metric)
 
     # Loss function not specified
     with pytest.raises(ValueError):
-        embeddings = ivis_iris.fit_transform(x, y)
-
+        embeddings = ivis_iris.fit_transform(X, Y)
 
-def test_svm_score_samples():
-    iris = datasets.load_iris()
-    x = iris.data
-    y = iris.target
+def test_svm_score_samples(X, Y):
 
     supervision_metric = 'categorical_hinge'
     ivis_iris = Ivis(k=15, batch_size=16, epochs=2,
                      supervision_metric=supervision_metric)
 
     # Correctly formatted one-hot labels train successfully
-    y = to_categorical(y)
-    embeddings = ivis_iris.fit_transform(x, y)
+    Y = to_categorical(Y)
+    embeddings = ivis_iris.fit_transform(X, Y)
 
-    y_pred = ivis_iris.score_samples(x)
+    y_pred = ivis_iris.score_samples(X)
 
     loss_name = ivis_iris.model_.loss['supervised'].__name__
     assert losses.get(loss_name).__name__ == losses.get(supervision_metric).__name__
     assert ivis_iris.model_.layers[-1].activation.__name__ == 'linear'
     assert ivis_iris.model_.layers[-1].kernel_regularizer is not None
-    assert ivis_iris.model_.layers[-1].output_shape[-1] == y.shape[-1]
+    assert ivis_iris.model_.layers[-1].output_shape[-1] == Y.shape[-1]
 
 
 def test_regression():
diff --git a/tests/nn/test_callbacks.py b/tests/nn/test_callbacks.py
index 997bc7c..060359b 100644
--- a/tests/nn/test_callbacks.py
+++ b/tests/nn/test_callbacks.py
@@ -13,10 +13,7 @@ def log_dir():
     yield temp_dir
 
 
-def test_model_checkpoint(log_dir):
-    iris = datasets.load_iris()
-    X = iris.data
-
+def test_model_checkpoint(X, log_dir):
     filename = 'model-checkpoint_{}.ivis'
     n_epochs = 2
     model = Ivis(epochs=n_epochs, k=15, batch_size=16,
@@ -30,10 +27,7 @@ def test_model_checkpoint(log_dir):
     model_2.fit_transform(X)
 
 
-def test_embeddings_logging(log_dir):
-    iris = datasets.load_iris()
-    X = iris.data
-
+def test_embeddings_logging(X, log_dir):
     filename = 'embeddings_{}.npy'
     n_epochs = 2
     model = Ivis(epochs=n_epochs, k=15, batch_size=16,
@@ -43,11 +37,7 @@ def test_embeddings_logging(log_dir):
     embeddings = np.load(os.path.join(log_dir, filename.format(n_epochs)))
 
 
-def test_embeddings_image(log_dir):
-    iris = datasets.load_iris()
-    X = iris.data
-    Y = iris.target
-
+def test_embeddings_image(X, Y, log_dir):
     filename = 'embeddings_{}.png'
     n_epochs = 2
     model = Ivis(epochs=n_epochs, k=15, batch_size=16,
@@ -57,11 +47,7 @@ def test_embeddings_image(log_dir):
     assert os.path.exists(os.path.join(log_dir, filename.format(n_epochs)))
 
 
-def test_embeddings_image(log_dir):
-    iris = datasets.load_iris()
-    X = iris.data
-    Y = iris.target
-
+def test_embeddings_image(X, Y, log_dir):
     n_epochs = 2
     model = Ivis(epochs=n_epochs, k=15, batch_size=16,
                  callbacks=[TensorBoardEmbeddingsImage(X, Y, log_dir, epoch_interval=1)])
diff --git a/tests/nn/test_losses.py b/tests/nn/test_losses.py
index b0156c5..8379a5e 100644
--- a/tests/nn/test_losses.py
+++ b/tests/nn/test_losses.py
@@ -30,9 +30,7 @@ def custom_loss_fn(y_true, y_pred):
     assert custom_loss_fn is losses.loss_dict[custom_loss_fn.__name__]
     assert losses.triplet_loss(distance=custom_loss_fn.__name__) is custom_loss_fn
 
-def test_custom_loss_ivis(model_filepath):
-    iris = datasets.load_iris()
-    X = iris.data
+def test_custom_loss_ivis(X, model_filepath):
 
     def euclidean_loss(y_true, y_pred):
         margin = 1
@@ -51,9 +49,7 @@ def euclidean_loss(y_true, y_pred):
     with pytest.raises(ValueError):
         model_3.load_model(model_filepath)
 
-def test_custom_loss_ivis_callable(model_filepath):
-    iris = datasets.load_iris()
-    X = iris.data
+def test_custom_loss_ivis_callable(X, model_filepath):
 
     class EuclideanDistance:
         def __init__(self, margin=1):
diff --git a/tests/test_model_saving.py b/tests/test_model_saving.py
index b17da4c..aab840c 100644
--- a/tests/test_model_saving.py
+++ b/tests/test_model_saving.py
@@ -5,7 +5,6 @@ import dill
 from functools import partial
 
-from sklearn import datasets
 import numpy as np
 import tensorflow as tf
 from ivis import Ivis
@@ -34,10 +33,8 @@ def _validate_network_equality(model_1, model_2):
                       model_2.model_.optimizer.get_weights()):
         assert np.all(w1 == w2)
 
-def _unsupervised_model_save_test(model_filepath, save_fn, load_fn):
+def _unsupervised_model_save_test(model_filepath, X, save_fn, load_fn):
     model = Ivis(k=15, batch_size=16, epochs=2)
-    iris = datasets.load_iris()
-    X = iris.data
 
     model.fit(X)
     save_fn(model, model_filepath)
@@ -51,12 +48,9 @@ def _unsupervised_model_save_test(model_filepath, save_fn, load_fn):
     y_pred_2 = model_2.fit_transform(X)
 
 
-def _supervised_model_save_test(model_filepath, save_fn, load_fn):
+def _supervised_model_save_test(model_filepath, X, Y, save_fn, load_fn):
     model = Ivis(k=15, batch_size=16, epochs=2,
                  supervision_metric='sparse_categorical_crossentropy')
-    iris = datasets.load_iris()
-    X = iris.data
-    Y = iris.target
 
     model.fit(X, Y)
     save_fn(model, model_filepath)
@@ -73,10 +67,7 @@ def _supervised_model_save_test(model_filepath, save_fn, load_fn):
     y_pred_2 = model_2.fit_transform(X, Y)
 
 
-def _custom_model_saving(model_filepath, save_fn, load_fn):
-    iris = datasets.load_iris()
-    X = iris.data
-    Y = iris.target
+def _custom_model_saving(model_filepath, X, Y, save_fn, load_fn):
 
     # Create a custom model
     inputs = tf.keras.layers.Input(shape=(X.shape[-1],))
@@ -101,10 +92,7 @@ def _custom_model_saving(model_filepath, save_fn, load_fn):
     y_pred_2 = model_2.fit_transform(X, Y)
 
 
-def _supervised_custom_model_saving(model_filepath, save_fn, load_fn):
-    iris = datasets.load_iris()
-    X = iris.data
-    Y = iris.target
+def _supervised_custom_model_saving(model_filepath, X, Y, save_fn, load_fn):
 
     # Create a custom model
     inputs = tf.keras.layers.Input(shape=(X.shape[-1],))
@@ -188,10 +176,8 @@ def test_untrained_model_persistence(model_filepath):
     model = Ivis(k=15, batch_size=16, epochs=2)
     model.save_model(model_filepath)
 
-def test_save_overwriting(model_filepath):
+def test_save_overwriting(model_filepath, X):
     model = Ivis(k=15, batch_size=16, epochs=2)
-    iris = datasets.load_iris()
-    X = iris.data
 
     model.fit(X)
     model.save_model(model_filepath)