Skip to content

Commit

Permalink
Updates from Old PC
Browse files Browse the repository at this point in the history
  • Loading branch information
lkev committed Jul 11, 2016
1 parent fbe3e70 commit 7094406
Show file tree
Hide file tree
Showing 3 changed files with 47 additions and 21 deletions.
17 changes: 9 additions & 8 deletions fault_vs_all_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,25 +5,25 @@
scada = Turbine.scada_data

# This gets all the data EXCEPT the faults listed. Labelled as nf for "no-fault"
nf = Turbine.filter(scada,Turbine.status_data_wec, "Main_Status",
'fault_case_1', True, 600, 600, [62, 9, 80])
nf = Turbine.filter(scada, Turbine.status_data_wec, "Main_Status",
'fault_case_1', True, 600, 600, [62, 9, 80])
# feeding fault
ff = Turbine.filter(scada,Turbine.status_data_wec, "Main_Status",
ff = Turbine.filter(scada, Turbine.status_data_wec, "Main_Status",
'fault_case_1', False, 600, 600, 62)
# mains failure fault
mf = Turbine.filter(scada,Turbine.status_data_wec, "Main_Status",
mf = Turbine.filter(scada, Turbine.status_data_wec, "Main_Status",
'fault_case_1', False, 600, 600, 60)

# generator heating fault
gf = Turbine.filter(scada,Turbine.status_data_wec, "Main_Status",
gf = Turbine.filter(scada, Turbine.status_data_wec, "Main_Status",
'fault_case_1', False, 600, 600, 9)

# air-cooling fault
af = Turbine.filter(scada,Turbine.status_data_wec, "Main_Status",
af = Turbine.filter(scada, Turbine.status_data_wec, "Main_Status",
'fault_case_1', False, 600, 600, 228)

# excitation fault
ef = Turbine.filter(scada,Turbine.status_data_wec, "Main_Status",
ef = Turbine.filter(scada, Turbine.status_data_wec, "Main_Status",
'fault_case_1', False, 600, 600, 80)

features = ['WEC_ava_windspeed',
Expand Down Expand Up @@ -69,7 +69,8 @@
# 'fault_case_1', True, 600,600,[62])

# label and split into train, test and balanced training data
xtrain, xtest, ytrain, ytest, xbaltrain, ybaltrain = Turbine.get_test_train_data(features, faults, nf)
xtrain, xtest, ytrain, ytest, xbaltrain, ybaltrain = \
Turbine.get_test_train_data(features, faults, nf)
# labels for confusion matrix
labels = ['no-fault', 'feeding fault', 'excitation fault', 'generator fault']
# train and test svm
Expand Down
22 changes: 17 additions & 5 deletions results_fault_diagnosis_1_v_all.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import winfault
import warnings
from sklearn.grid_search import GridSearchCV, RandomizedSearchCV

Turbine = winfault.WT_data()

Expand Down Expand Up @@ -49,14 +51,24 @@
# select the faults to include.
faults = [ff, ef, gf]

# warnings suppressed because there are loads of UndefinedMetricWarnings
warnings.filterwarnings("ignore")

# label and split into train, test and balanced training data
xtrain, xtest, ytrain, ytest, xbaltrain, ybaltrain = \
Turbine.get_test_train_data(features, faults, nf)

# labels for confusion matrix
labels = ['no-fault', 'feeding fault', 'excitation fault', 'generator fault']

# set the parameter space (class_weight is None for the balanced training data)
parameter_space = {
'kernel': ['linear', 'rbf', 'poly'], 'gamma': ['auto', 1e-3, 1e-4],
'C': [0.01, .1, 1, 10, 100, 1000],
'class_weight': [{0: 0.01}, {1: 1}, {1: 2}, {1: 10}, {1: 50}, 'balanced']}

# train and test svm
print("results for basic SVM")
winfault.svm_class_and_score(xbaltrain, ybaltrain, xtest, ytest, labels)
print("results for bagging SVM")
winfault.svm_class_and_score(xbaltrain, ybaltrain, xtest, ytest, labels,
bagged=True)
clf, bgg = winfault.svm_class_and_score(
xbaltrain, ybaltrain, xtest, ytest, labels,
parameter_space=parameter_space, bagged=True, score='precision_weighted',
search_type=GridSearchCV)
29 changes: 21 additions & 8 deletions winfault.py
Original file line number Diff line number Diff line change
Expand Up @@ -888,10 +888,11 @@ def get_test_train_data(
def svm_class_and_score(
X_train, y_train, X_test, y_test, labels, search_type=RandomizedSearchCV,
parameter_space={
'kernel': ['linear'], 'gamma': ['auto', 1e-3, 1e-4],
'kernel': ['linear', 'rbf', 'poly'], 'gamma': ['auto', 1e-3, 1e-4],
'C': [0.01, .1, 1, 10, 100, 1000],
'class_weight': [{0: 0.01}, {1: 1}, {1: 2}, {1: 10}, {1: 50}]},
score='recall_weighted', iid=True, bagged=False):
'class_weight': [
{0: 0.01}, {1: 1}, {1: 2}, {1: 10}, {1: 50}, 'balanced']},
score='recall_weighted', iid=True, bagged=False, svm_results=True):
"""Build an SVM and return its scoring metrics
"""
print("# Tuning hyper-parameters for %s" % score)
Expand All @@ -900,18 +901,30 @@ def svm_class_and_score(
# Find the Hyperparameters
clf = search_type(SVC(C=1), parameter_space, cv=10,
scoring=score, iid=iid)
if bagged is True:
clf = BaggingClassifier(base_estimator=clf)

# Build the SVM
clf.fit(X_train, y_train)
print("Hyperparameters found:")
print(clf.best_params_)

# Make the predictions
y_pred = clf.predict(X_test)

print()
print()
print("Results for basic SVM")
clf_scoring(y_test, y_pred, labels)

return clf
if bagged is True:
bgg = BaggingClassifier(base_estimator=clf)
bgg.fit(X_train, y_train)
y_pred = bgg.predict(X_test)
print()
print()
print("Results for bagging:")
clf_scoring(y_test, y_pred, labels)
return clf, bgg
else:
return clf


def clf_scoring(y_test, y_pred, labels):
Expand All @@ -925,7 +938,7 @@ def clf_scoring(y_test, y_pred, labels):
cm_normalized = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]

# Also print specificity metric
print("Specificity:", cm[0, 0] / (cm[0, 1] + cm[0, 0]))
# print("Specificity:", cm[0, 0] / (cm[0, 1] + cm[0, 0]))
print(cm)

# plot the confusion matrices
Expand Down

0 comments on commit 7094406

Please sign in to comment.