From a6622ab1d5c3daac93df8e4462ce343e28b044b8 Mon Sep 17 00:00:00 2001
From: faturita
Date: Thu, 27 Jun 2024 10:39:53 -0300
Subject: [PATCH] Update due to library version changes.

---
 filter_spatial.py              |  4 +---
 onepassclassifier.py           |  2 +-
 onepassfeatureextraction.py    |  2 +-
 signalfeatureclassification.py | 23 +++++++++++------------
 4 files changed, 14 insertions(+), 17 deletions(-)

diff --git a/filter_spatial.py b/filter_spatial.py
index 1aba3e5..1a68bfe 100644
--- a/filter_spatial.py
+++ b/filter_spatial.py
@@ -29,15 +29,13 @@
 
 # The first plot with all the signals, as it may be in reality.
 plt.figure(1)
-plt.title('Sinusoidal')
 plt.subplot(3,1,1)
 plt.plot(s1, color='red')
-plt.title('Squared')
 plt.subplot(3,1,2)
 plt.plot(s2, color='steelblue')
-plt.title('Sawtooth')
 plt.subplot(3,1,3)
 plt.plot(s3, color='orange')
+plt.show()
 
 
 # Put all the signals together in a multichannel arrangement
diff --git a/onepassclassifier.py b/onepassclassifier.py
index 0656a99..96324c2 100644
--- a/onepassclassifier.py
+++ b/onepassclassifier.py
@@ -53,7 +53,7 @@ def unpickle_keypoints(array):
     keypoints = []
     descriptors = []
     for point in array:
-        temp_feature = cv2.KeyPoint(x=point[0][0],y=point[0][1],_size=point[1], _angle=point[2], _response=point[3], _octave=point[4], _class_id=point[5])
+        temp_feature = cv2.KeyPoint(x=point[0][0],y=point[0][1],size=point[1], angle=point[2], response=point[3], octave=point[4], class_id=point[5])
         temp_descriptor = point[6]
         keypoints.append(temp_feature)
         descriptors.append(temp_descriptor)
diff --git a/onepassfeatureextraction.py b/onepassfeatureextraction.py
index 5eb5314..fa432aa 100644
--- a/onepassfeatureextraction.py
+++ b/onepassfeatureextraction.py
@@ -53,7 +53,7 @@ def unpickle_keypoints(array):
 
 print ("Connecting..")
 
-for i in range(1,20):
+for i in range(1,2000):
 
     # Capture frame-by-frame
     ret, frame = cap.read()
diff --git a/signalfeatureclassification.py b/signalfeatureclassification.py
index 8235395..2c4eeec 100644
--- a/signalfeatureclassification.py
+++ b/signalfeatureclassification.py
@@ -3,8 +3,8 @@
 Signal Feature Classification
 ==========================================
 
-# Run with ann virtual environment
-# EPOC Emotiv file format https://www.researchgate.net/publication/332514530_EPOC_Emotiv_EEG_Basics
+# Run inside a virtual environment with keras, sklearn, numpy and tensorflow installed.
+# EPOC Emotiv file format https://arxiv.org/pdf/2206.09051
 # OpemMP sometimes raises coredumps, try export KMP_DUPLICATE_LIB_OK=TRUE
 
 
@@ -38,6 +38,14 @@
 
 from scipy.signal import butter, lfilter
 
+from sklearn.linear_model import LogisticRegression
+
+from keras.models import Sequential
+from keras.layers import Dense
+
+import matplotlib.pyplot as plt
+
+
 def isartifact(window, threshold=80):
     # Window is EEG Matrix
 
@@ -46,10 +54,6 @@ def isartifact(window, threshold=80):
 
     signalaverage = ameans.tolist()
     athresholds = np.asarray([threshold]*len(signalaverage))
-    #print awindow
-    #print ameans
-    #print athresholds
-
     # FIXME
     for t in range(0,len(window)):
         asample = (ameans+athresholds)-awindow[t]
@@ -62,7 +66,7 @@ def isartifact(window, threshold=80):
 
     return False
 
-import matplotlib.pyplot as plt
+
 
 def butter_bandpass(lowcut, highcut, fs, order=5):
     nyq = 0.5 * fs
@@ -296,8 +300,6 @@ def classify(afeatures1, afeatures2, featuresize):
     report = classification_report(testlabels, predlabels, target_names=target_names)
     print(report)
 
-    from sklearn.linear_model import LogisticRegression
-
     # all parameters not specified are set to their defaults
     logisticRegr = LogisticRegression()
     logisticRegr.fit(trainingdata,traininglabels)
@@ -315,9 +317,6 @@ def classify(afeatures1, afeatures2, featuresize):
     report = classification_report(testlabels, predlabels, target_names=target_names)
     print(report)
 
-    from keras.models import Sequential
-    from keras.layers import Dense
-
     model = Sequential([
         Dense(64, activation='tanh', input_shape=(trainingdata.shape[1],)),
         Dense(32, activation='tanh'),