forked from windmark/static-gesture-recognition
mainProgram.py
import time, sys
import states
import getDataLeapMotion
import dataProcessing
from training.training import Knn
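
# Program flow: getDataFromLeapMotion() samples both hands from the Leap Motion,
# useClassifier() maps the sample onto a gesture with the pre-trained kNN model,
# and runUI() hands the gesture to the state machine in states.py; the cycle
# repeats until the UI signals that it is finished.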

# Get input from the Leap Motion
def getDataFromLeapMotion():
    # Get raw palm and finger data from the Leap Motion
    rawLeftPalmData, rawRightPalmData, rawFingerData = getDataLeapMotion.start()
    # Check whether the Leap Motion found two hands (-999 marks a missing hand)
    if min(rawLeftPalmData) == -999 or min(rawRightPalmData) == -999:
        print("Couldn't find two hands!")
        # If not, rerun the function
        getDataFromLeapMotion()
    else:
        # Remove redundant (duplicate) finger data
        rawFingerData2 = rawFingerData[0][:30]
        # Order the data into the featureVector format: right palm, right fingers, left palm, left fingers.
        # convertToFeatureVectors also normalizes the data.
        processedData = dataProcessing.convertToFeatureVectors([rawLeftPalmData], [rawRightPalmData], [rawFingerData2])
        useClassifier(processedData[0])
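
# Note: a failed read recurses into getDataFromLeapMotion() instead of looping;
# this is assumed to be fine for short runs, but a long streak of missed hands
# could hit CPython's default recursion limit.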

# Throw samples at the classifier
def useClassifier(sampledData):
    # classify should return a gesture as an int in the range [0, 7]
    gesture = knn.classify(sampledData)
    print(gesture)
    runUI(gesture)

# Run states.py, the UI, with the gesture
def runUI(gesture):
    end = gs.GestureState(gesture)
    # A return value of 99 signals that the UI has reached its end state;
    # anything else keeps sampling gestures
    if end != 99:
        getDataFromLeapMotion()
    else:
        return True

# Create a Gstate instance from states.py
gs = states.Gstate()

# Initialize the k-nearest-neighbour classifier and load its trained model
knn = Knn()
knn.loadModel('training/models/knnModel.pkl')

# Run the program with gestures from the Leap Motion
getDataFromLeapMotion()
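
# Usage sketch (assumes the Leap Motion service is running and the trained model
# exists at training/models/knnModel.pkl):
#   python mainProgram.py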