
Commit b8059de

added sc and sqlcontext
1 parent 696a66a commit b8059de


splicemachine/ml/zeppelin.py

Lines changed: 6 additions & 2 deletions
@@ -69,9 +69,11 @@ def create_new_run(self):
         self.run_uuid = None
 
 
-def show_confusion_matrix(TP, TN, FP, FN):
+def show_confusion_matrix(sc, sqlContext, TP, TN, FP, FN):
     """
     Displays a confusion matrix, which is helpful when evaluating how well your model performs
+    :param sc: Spark Context
+    :param sqlContext: SQL Context
     :param TP: True Positives
     :param TN: True Negatives
     :param FP: False Positives
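For context, a minimal usage sketch of the updated signature, assuming the sc (SparkContext) and sqlContext (SQLContext) variables that Zeppelin notebooks typically bind, and hypothetical confusion-matrix counts:

from splicemachine.ml.zeppelin import show_confusion_matrix

# sc and sqlContext are assumed to be the SparkContext and SQLContext
# provided by the Zeppelin interpreter; the counts below are made-up
# values standing in for the results of some prior evaluation step.
show_confusion_matrix(sc, sqlContext, TP=90, TN=85, FP=10, FN=15)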
@@ -113,8 +115,10 @@ class ModelEvaluator(object):
     A class that provides an easy way to evaluate models once, or over random iterations
     """
 
-    def __init__(self, label_column='label', prediction_column='prediction', confusion_matrix=True):
+    def __init__(self, sc, sqlContext, label_column='label', prediction_column='prediction', confusion_matrix=True):
         """
+        :param sc: Spark Context
+        :param sqlContext: SQLContext
         :param label_column: the column in the dataframe containing the correct output
         :param prediction_column: the column in the dataframe containing the prediction
         :param confusion_matrix: whether or not to show a confusion matrix after each input
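Likewise, a hedged sketch of constructing ModelEvaluator with the new required arguments, again assuming Zeppelin's sc and sqlContext bindings; the keyword arguments simply restate the defaults shown in the diff:

from splicemachine.ml.zeppelin import ModelEvaluator

# sc and sqlContext are assumed to come from the Zeppelin environment;
# label_column, prediction_column, and confusion_matrix mirror the
# documented defaults, so passing them explicitly is purely illustrative.
evaluator = ModelEvaluator(sc, sqlContext,
                           label_column='label',
                           prediction_column='prediction',
                           confusion_matrix=True)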
