Professional Documents
Culture Documents
Supervised by
Dr Wasim Ahmed Khan
BY
Supervised by
Dr Wasim Ahmed Khan
Abstract ................................................................................................................................i
Table Of Contents................................................................................................................ii
Nomenclature.......................................................................................................................v
List Of Figures..................................................................................................................viii
List Of Tables.......................................................................................................................x
Chapter 1 Introduction.......................................................................................................11
1.1 Background And Motivation.........................................................................11
1.2 Problem Statement.........................................................................................11
1.3 Scope Of Work And Expected Outcome.......................................................11
1.4 Report Outline...............................................................................................12
1.5 Project Timeline............................................................................................12
1.6 Individual And Team Contribution...............................................................14
Chapter 2 Literature Review..............................................................................................15
2.1 Literature Review..........................................................................................15
2.1.1 EXOglove...................................................................................................15
2.1.2 RElab Tenoexo...........................................................................................15
2.1.3 Mano...........................................................................................................16
2.2 Inferences Drawn Out Of Literature..............................................................16
2.3 Summary........................................................................................................17
Chapter 3 Design And Analysis.........................................................................................18
3.1 Design Methodology.....................................................................................18
3.1.1 Methods For Extracting Motor Signals......................................................18
3.1.2 Concept Designs.........................................................................................19
3.1.2.1 Linkage Based Mechanism......................................................................19
3.1.2.2 Elastic-Bending Based Mechanism.........................................................20
3.1.2.3 Bowden-Cable Based Mechanism...........................................................21
3.1.3 Decision Matrix (Pugh Chart)....................................................................21
3.1.4 Final Design Conclusion............................................................................22
3.2 Mathematical Formulation............................................................................22
3.2.1 Free Body Diagram....................................................................................23
3.2.1.1 Distal:......................................................................................................23
3.2.1.2 Middle:....................................................................................................23
3.2.1.3 Proximal:.................................................................................................24
3.2.2 Mathematical Model:..................................................................................25
3.2.3 Rack And Pinion Calculations:..................................................................33
3.3 Geometric Modelling And Design................................................................34
3.3.1 Bowden-Cable Based Mechanism..............................................................34
3.3.1.2 The Distal Ring........................................................................................35
3.3.1.3 The Cap...................................................................................................36
3.3.1.4 Junction Pieces:.......................................................................................36
3.3.1.5 Rack And Pinion......................................................................................37
3.3.1.6 Actuator Assembly Unit..........................................................................38
3.3.2 Isometric View Of The Assembly..............................................................40
3.4 Machine Learning..........................................................................................40
3.4.1 AI Algorithms:............................................................................................41
3.5 Summary........................................................................................................42
Chapter 4 Physical Model Development & Testing..........................................................43
4.1 Development Process....................................................................................43
4.1.1 Custom Designed Components:.................................................................43
4.1.2 Pre-Made Components:..............................................................................43
4.1.3 Complete Physical Model:.........................................................................44
4.2 Integration & Instrumentation.......................................................................45
4.2.1 Sensor And Electronics:.............................................................................46
4.2.2 Data Acquisition:........................................................................................48
4.2.2.1 Setting Up The Sensors:..........................................................................48
4.2.2.2 Reading & Sending The Data - Arduino:................................................49
4.2.2.3 Reading & Storing The Data - Python:...................................................49
4.2.2.4 Visualizing The Data- Python:................................................................51
4.2.2.5 Emg Signals For Various Movements:....................................................52
4.2.3 Data Handling:............................................................................................54
4.2.3.1 Feature Extraction:..................................................................................54
4.2.3.2 Visualizing The Data:..............................................................................57
4.2.3.3 Training The Data:..................................................................................59
4.3 Testing/Experimental Procedures..................................................................60
4.4 Summary........................................................................................................63
Chapter 5 Results & Discussion.........................................................................................64
5.1 Results...........................................................................................................64
5.2 Analysis And Discussions.............................................................................65
5.3 Summary........................................................................................................65
Chapter 6 Impact And Economic Analysis........................................................................66
6.1 Social Impact.................................................................................................66
6.2 Sustainability Analysis..................................................................................67
6.2.1 Economic Sustainability:............................................................................67
6.2.2 Impact Sustainability:.................................................................................67
6.2.3 Scope Of Implementation:..........................................................................68
6.2.4 Climate Conditions Suitability:..................................................................68
6.3 Environmental Impact...................................................................................68
6.3.1 Materials:....................................................................................................68
6.3.1.1 ABS Plastic:.............................................................................................68
6.3.1.2 PLA..........................................................................................................69
6.3.2 Processing Methods:...................................................................................70
6.3.2.1 3d Printing:..............................................................................................70
6.4 Sustainable Development Goals (SDGs).......................................................72
6.5 Summary........................................................................................................74
Chapter 7 Conclusion And Future Recommendations.......................................................75
7.1 Conclusions...................................................................................................75
7.2 Future Recommendations..............................................................................75
Appendices.........................................................................................................................77
References........................................................................................................................102
Nomenclature
EEG Electroencephalography
EOG Electrooculography
EMG Electromyography
MI Motor Imagery
θ1 Angle of curl of the distal phalange with respect to the middle phalange
θ2 Angle of curl of the middle phalange with respect to the proximal phalange
CCW+ Counterclockwise direction taken as positive (sign convention)
MM Moment about the middle phalange, positive in counterclockwise direction
W Weight of object
F. S Factor of Safety
LiPo Lithium-Polymer
OVO One-vs-One
OVA One-vs-All
1
by EMG sensors present in Myo armband which is processed by Arduino Uno through
machine learning. The hand exoskeleton will be used for rehabilitation purposes by
compensating for the deficit force provided by the hand and not for incrementing it.
The limitations of the exoskeleton include lack of wrist and arm control, less precision in the
position control of the fingers, and no haptic feedback.
2
Fig 1-1: Project Timeline
3
1.6 Individual and Team Contribution
Table 1-1: Individual and Team Contributions in the Project
Literature Review Hagera
Maheen
Mubashir
Report Hagera
Maheen
Mubashir
4
CHAPTER 2
LITERATURE REVIEW
5
Myo Armband using dry surface EMG electrodes encased in the form of an armband, and
it essentially reduces the complications caused by conventional EMG kits.
2.1.3 Mano
Randazzo et al. (2018) worked on a wearable hand exoskeleton sufficient to perform
daily life activities such as grasping for eating and drinking, and miscellaneous actions.
The exoskeleton is actuated by Bowden cables such that the thumb, index and
intermediate fingers have independent actuation, while the ring and little fingers have a
combined actuation. A chest-pack housing holds the battery, linear actuators, and
microcontroller unit (Arduino Mega). The unique aspect of this exoskeleton is that the
structural design covers minimal area on the palm, fingertips and forearm in order to
preserve the natural somatosensory capabilities. This is achieved by using Bowden cables
as the structural supports for the exoskeleton, along with their use as the actuation
elements.[6]
This study also aimed at determining whether motor imagery (MI) commands extracted
only from electro encephalography (EEG) signals could be used to continuously control
this exoskeleton hand. Although the conclusions suggested that the exoskeleton aided
users in performing MI tasks, this was not made part of the original Mano design due to
the uncertainty whether it was solely due to the exoskeleton’s actuation or due to
feedback from the MI.
6
Fig 2-2: Electrode Positions for EEG Headset
Sensors based on EEG activity are generally complicated to set up because various
electrodes are placed at different locations of scalp, depending on the state of desired
mind activity. In many cases, it may become difficult for patients to accurately place
these electrodes without professional assistance. Some EEG headsets also require wearing
a head cover, which may be reasonably tight and becomes uncomfortable over a
prolonged period of time.
Although EMG sensor kits have complications of their own — namely the excessive
cables and the filtering of signals — these are certainly fewer than those posed by an EEG
headset. The inconveniences caused by conventional EMG kits are why recording
EMG signals through the Myo Armband is a feasible option for countering
them, as a stable Bluetooth connection is the only prerequisite to be fulfilled [3].
Additionally, Machine Learning can be implemented in this project for the purpose of
deploying a training module to recognize all the input signals of hand motions. Then the
exoskeleton hand would perform desired actions when it recognizes a specific input
signal. This training module allows the exoskeleton hand to adapt to multiple users [3].
2.3 Summary
This chapter explored a brief review of research on three different models of exoskeleton
hand, and the inferences drawn from them manifest the basis of our own design. The
literature will also form the ground in deciding multiple parameters such as hardware,
materials, simplicity, adaptability, power requirement, weight, and fabrication methods.
The design of exoskeleton hand we aim to achieve is partly a combination of RELab
Tenoexo and Mano, incorporating the use of Myo Armband from the former and Bowden
cables as structural support from the latter, with design modifications of our own to
encompass uniqueness as well. In essence, RELab Tenoexo and Mano serve as a
reference model and benchmark for our project.
7
CHAPTER 3
DESIGN AND ANALYSIS
8
into a useful command for the microcontroller. The signal strength has a range of
250–1000 µV.
9
geometrically restricted and allows one degree of freedom for each finger, therefore, five
in total. Thus, if the angle of rotation of the crank is known, the position of everything
else is determinable.
This mechanism is capable of exerting great force and is robust, however, the design has
its limitations. Since the nature of the rotation of the phalanges is always the same, the
mechanism does not adapt to the shape of an object being handled.
10
3.1.2.3 Bowden-cable Based Mechanism
11
Table 3-2: Mechanism Comparison (Pugh Chart)
Weight -1 +1 +1
Cost -1 0 +1
Ease of Fabrication -1 0 +1
Ease of assembly -1 0 +1
Power Requirement -1 0 +1
Ergonomic Design -1 +1 +1
Adaptability -1 +1 +1
Feasibility 0 -1 +1
Mechanical Losses 0 +1 -1
Possible Irregularities +1 0 -1
Soft Robotics -1 +1 +1
Total -7 +4 +7
*The datum is set to zero. ‘+1’ sign means positive inclusion and ‘-1’ means negative inclusion and 0 means neutral.
12
Let us begin by developing the free-body diagram of a finger. Here we have considered
that the individual phalanges of the fingers are links (because they are connected by a
revolute joint).
3.2.1.1 Distal:
13
3.2.1.2 Middle:
3.2.1.3 Proximal:
14
Fig 3-8: A rigid body held by the finger
This object is assumed to be a rigid body, and its shape is defined by a polygon. The
vertices of this polygon are in contact with the finger.
This object must stay in equilibrium; therefore, the free body diagram becomes:
The FBD shows the finger along with its angles of curl. $\theta_1$ shows the angle of curl of the
distal phalange with respect to the middle phalange, and $\theta_2$ represents the angle of curl of
the middle phalange with respect to the proximal phalange. For simplification, the lengths
15
of the middle and proximal phalanges are taken equal, and the length of the distal
phalange is taken to be half of that.
The forces exerted on the finger are taken to act at the middle of the links. As a
consequence, an isosceles triangle is formed in which two angles and two sides are equal;
thus, $\alpha_n$ becomes the adjacent angle. $\beta_n$ represents the angle of the forces with respect to
the polygon. The forces are perpendicular because they are contact forces.
$$l_n^2 = \left(\frac{L}{2}\right)^2 + \left(\frac{L}{2}\right)^2 - 2\left(\frac{L}{2}\right)\left(\frac{L}{2}\right)\cos\left(180^\circ - \theta_n\right) \tag{3.7}$$

$$l_n^2 = 2\left(\frac{L}{2}\right)^2 \left(1 + \cos\theta_n\right) \tag{3.8}$$
16
$$F_P \times \sqrt{2\left(\frac{L}{2}\right)^2\left(1+\cos\theta_2\right)} \times \cos\!\left(\frac{\theta_2}{2}\right) = F_D \times \sqrt{2\left(\frac{L}{2}\right)^2\left(1+\cos\theta_1\right)} \times \cos\!\left(\frac{\theta_1}{2}\right) \tag{3.9}$$

$$F_D = F_P \times \frac{\cos\left(\theta_2/2\right)}{\cos\left(\theta_1/2\right)} \sqrt{\frac{1+\cos\theta_2}{1+\cos\theta_1}}, \qquad 0^\circ \le \theta_1, \theta_2 \le 90^\circ \tag{3.10}$$
We must also balance the forces. For this, we situate our coordinate axis on $F_D$:

$$\Sigma F_x = 0 \tag{3.11}$$

$$F_M = F_P \, \frac{\cos\left(\theta_1 + \theta_2 - 90^\circ\right)}{\cos\left(\theta_1 - 90^\circ\right)} \tag{3.13}$$

$$\Sigma F_y = 0 \tag{3.14}$$

$$F_D + F_P \, \frac{\cos\left(\theta_1 + \theta_2 - 90^\circ\right)}{\cos\left(\theta_1 - 90^\circ\right)} \sin\left(\theta_1 - 90^\circ\right) - F_P \sin\left(\theta_1 + \theta_2 - 90^\circ\right) = 0 \tag{3.15}$$

$$F_D + F_P \left( \frac{\cos\left(\theta_1 + \theta_2 - 90^\circ\right)}{\cos\left(\theta_1 - 90^\circ\right)} \sin\left(\theta_1 - 90^\circ\right) - \sin\left(\theta_1 + \theta_2 - 90^\circ\right) \right) = 0 \tag{3.16}$$

$$F_D = F_P \left( \sin\left(\theta_1 + \theta_2 - 90^\circ\right) - \frac{\cos\left(\theta_1 + \theta_2 - 90^\circ\right)}{\cos\left(\theta_1 - 90^\circ\right)} \sin\left(\theta_1 - 90^\circ\right) \right) \tag{3.17}$$

$$F_D = F_P \left( \frac{\sin\left(\theta_1 + \theta_2\right)}{\sin\left(\theta_1\right)} \cos\left(\theta_1\right) - \cos\left(\theta_1 + \theta_2\right) \right) \tag{3.18}$$
From eq. (3.10), the force ratio $\dfrac{F_D}{F_P} = \dfrac{\cos\left(\theta_2/2\right)}{\cos\left(\theta_1/2\right)} \sqrt{\dfrac{1+\cos\theta_2}{1+\cos\theta_1}}$ can be examined as a function of the two angles.
17
Let θ1 = x and θ2 = y
18
Fig 3-11: Relation between the angles
As it turns out, when both the moment and the forces are balanced (according to our
model), the angles turn out to be equal:
θ=θ1=θ2 (3.20)
Substitute eq .(3.20) into eq .(3.10):
$$F_D = F_P \, \frac{\cos\left(\theta/2\right) \sqrt{1+\cos\theta}}{\cos\left(\theta/2\right) \sqrt{1+\cos\theta}} \tag{3.21}$$
F D =F P (3.22)
Substitute eq .(3.20) into eq .(3.13):
$$F_M = F_P \, \frac{\cos\left(2\theta - 90^\circ\right)}{\cos\left(\theta - 90^\circ\right)} \tag{3.23}$$

$$F_M = F_P \, \frac{\sin\left(2\theta\right)}{\sin\left(\theta\right)} \tag{3.24}$$

$$F_M = F_P \, \frac{2\sin\theta\cos\theta}{\sin\theta} \tag{3.25}$$

$$F_M = 2 F_P \cos\theta \quad \rightarrow \; \text{eq. } E \tag{3.26}$$

$$\mu F_M + \mu F_P + \mu F_D = W \tag{3.27}$$

$$\mu \left(F_M + F_P + F_D\right) = W \tag{3.28}$$

$$F_M + F_P + F_D = \frac{W}{\mu} \tag{3.29}$$
19
The table below shows the contribution of individual fingers [4]:
Table 3-3: Contribution of each finger in a grip [4]
Finger Percentage of Total Grip Strength
Index 40-45%
Middle 20-25%
Ring 10-15%
Pinky 5-10%
Thumb 20-25%
20
Fig 3-12: Relation between force and angle
Fp = y & θ = x (3.36)
As it can be seen, the Force increases with the angle. The maximum Force exists at π /2.
$$F_P \Big|_{\theta = \pi/2} = 15.75 \; \text{N} \tag{3.37}$$
Now let us consider the Force exerted by the mechanism. Consider the FBD of the distal
phalange (link). Balance Moment about the link’s hinge:
$$\Sigma M_0 = 0 \tag{3.39}$$

$$F \times d = F_D \times L_1 \tag{3.40}$$

$$F = \frac{F_D \times L_1}{d} \quad \rightarrow \; \text{eq. } F \tag{3.41}$$

Referring to Appendix D, the length of the distal moment arm is $d = 6.5$ mm and the link
length is $L_1 = 13.55$ mm.

$$F = \frac{15.75 \times 13.55 \times 10^{-3}}{6.5 \times 10^{-3}} \tag{3.42}$$
F=32.832 N (3.43)
Consider the Bowden cable only:
21
Let F R be the force exerted on the Bowden cable at the end of it.
The friction force in a Bowden Cable is given by [2]:
$$T_{out} = T_{in} \, e^{-\mu\varphi}, \qquad \mu = 0.075 \tag{3.44}$$
The maximum bend angle (due to bend in elbow) according to design is φ=π .
$$F_R = \frac{32.832}{e^{-0.075\pi}} \tag{3.46}$$
F R =41.555 N (3.47)
Applying Euler’s column equation for buckling for the third case:
22
Fig 3-15: Buckling of Bowden Cable
$$P_R = \frac{\pi^2 E I}{\left(\dfrac{L}{2}\right)^2} = F.S \times F_R \tag{3.48}$$
Where E is the modulus of elasticity and I is the moment of inertia of the cross-section of
the cable.
23
Where h is the distance from the center of the cross-section to the center of an outer
strand.
$$h = \frac{d}{2} + \frac{d}{2} = d \tag{3.54}$$

$$I_{total} = 7 \times I_{strand} + 6 \times \left(\frac{\pi d^2}{4}\right) (d)^2 \tag{3.55}$$

$$I_{total} = 7 \times \left(\frac{\pi d^4}{64}\right) + 6 \times \left(\frac{\pi d^4}{4}\right) \tag{3.56}$$

$$I_{total} = \frac{\pi d^4}{64} \left(\frac{7}{16} + 6\right) \tag{3.57}$$

$$I_{total} = \frac{\pi}{64} \left(\frac{1.5 \times 10^{-3}}{3}\right)^4 \left(\frac{7}{16} + 6\right) \tag{3.58}$$

$$I_{total} = 1.975 \times 10^{-14} \; \text{m}^4 \tag{3.59}$$
$$P_R = \frac{\pi^2 \left(183.9 \times 10^{9}\right) \times \left(1.975 \times 10^{-14}\right)}{\left(\dfrac{15 \times 10^{-2}}{2}\right)^2} \tag{3.60}$$
P R=101.963 N (3.61)
101.963
F . S= =2.45 (3.62)
41.555
3.2.3 Rack and Pinion Calculations:
Force required: F R =41.555 N
Servomotor Torque: τ =1.2 Nm (4.8 V ),1.4 Nm (7.2V )
Radius of Pinion:
τ =F R ×r (3.63)
τ
r= (3.64)
FR
$$r = \frac{\left(\dfrac{1.2 + 1.4}{2}\right)}{41.555} \tag{3.65}$$

$$r = 0.03128 \; \text{m} \tag{3.66}$$

$$\therefore \; r = 31.28 \; \text{mm} \tag{3.67}$$
24
Length of Rack:
Since the motor can rotate only 180 °,
S=rθ (3.68)
$$S = \left(31.28\right) \left(180^\circ \times \frac{\pi}{180^\circ}\right) \tag{3.69}$$

$$S = 98.269 \; \text{mm} \tag{3.70}$$

$$\therefore \; S = 100 \; \text{mm} \tag{3.71}$$
Design of Rack and Pinion:
Radius of Pinion=r =30 mm (3.72)
Pitch diameter of Pinion=d =60 mm (3.73)
No . of teeth=N=20 (3.74)
$$\text{Module} = m = \frac{d}{N} \tag{3.75}$$

$$m = \frac{60}{20} \tag{3.76}$$

$$m = 3 \tag{3.77}$$

$$\text{Circular pitch of Pinion} = p = \frac{\pi d}{N} \tag{3.78}$$

$$p = \frac{\pi \left(60\right)}{20} \tag{3.79}$$

$$p = 9.425 \; \text{mm} \tag{3.80}$$

$$\text{Thickness of teeth} = t = \frac{p}{2} \tag{3.81}$$

$$t = \frac{9.425}{2} \tag{3.82}$$

$$t = 4.7125 \; \text{mm} \tag{3.83}$$
Pitch Height of Rack=10 mm (3.84)
Mounting Distance=r + H=40 mm (3.85)
addendum=a=m=3 mm (3.86)
dedendum=b=1.25 a=3.75 mm (3.87)
$$\text{fillet radius} = r_f = \frac{m}{3} = 1 \; \text{mm} \tag{3.88}$$
face width=c=9 m<c <14 m (3.89)
25
c=12 mm (3.90)
$$\text{addendum radius} = r_a = r + a = 33 \; \text{mm} \tag{3.91}$$

$$\text{dedendum radius} = r_d = r - b = 26.25 \; \text{mm} \tag{3.92}$$
Base Circle:
Pressure Angle=ϕ=20 ° (3.93)
r b =r cos ϕ=28.19 mm (3.94)
d b =d cos ϕ=56.38 mm (3.95)
pb= p cos ϕ=8.856 mm (3.96)
The final design’s mechanism consists of three main parts which are repeated for each
finger.
26
3.3.1.2 The Distal Ring
27
3.3.1.4 Junction Pieces:
Junction pieces serve to align the Bowden cables in their respective path, preventing
entanglement of the cables.
28
Fig 3-22: Rack gear for thumb/index/middle finger cable
Fig 3-23: Rack gear for ring and little finger combined cable
The shape of servo motor’s horn gear is cut into one side of the pinion gear’s face. This is
done to ensure proper locking of the pinion with the servo motor.
29
Fig 3-24: Pinion Gear
With all the components in place, the assembled unit looks like this:
30
Fig 3-26: Assembled Actuator Unit
31
The input data to the machine learning algorithm will include sensor readings from the
user's arm and hand. The algorithm will be responsible for interpreting this data and
making predictions about the user's intended finger movements.
Once the algorithm has been trained, it will be integrated into the overall control system
of the exoskeleton hand. The algorithm will continuously analyze sensor data in real-time
and make adjustments to the torques of the motors to produce the desired finger
movements. Over time, the algorithm may be able to adapt to the specific movements and
intentions of individual users, improving the overall performance and usability of the
exoskeleton hand.
3.4.1 AI Algorithms:
Some of the most widely used AI Algorithms for EMG are:
Linear discriminant analysis (LDA):
A technique that projects high-dimensional data onto a lower-dimensional space, while
maximizing the separation between different classes. It can be used for pattern
recognition and classification of EMG signals.
Support vector machines (SVM): A technique that finds the best decision boundary
between different classes in a dataset. It can be used for classification and regression
problems, including EMG signal processing.
Artificial neural networks (ANN): A family of algorithms that use layers of
interconnected nodes (neurons) to learn complex patterns in data. They can be used for
various tasks in EMG signal analysis, such as feature extraction, classification, and
segmentation.
Convolutional neural networks (CNN): A type of neural network that is specialized for
image and video processing. It can be used for processing EMG signals that are
represented as images or spectrograms.
Recurrent neural networks (RNN): A type of neural network that is specialized for
sequential data, such as time series or text. It can be used for analyzing EMG signals that
have temporal dependencies.
32
Deep belief networks (DBN): A type of neural network that consists of multiple layers
of restricted Boltzmann machines. It can be used for unsupervised feature learning and
classification of EMG signals.
These AI algorithms can be used individually or in combination with each other to
achieve better performance in EMG signal analysis tasks. The choice of algorithm
depends on the specific problem at hand, the available data, and the desired output or
decision.
Based on the Pugh table, three machine learning algorithms appear to be well-suited for
the given EMG signal processing task: SVM, LDA, and RNN. SVM has the highest total
score of 18, followed by LDA with a score of 15, and RNN with a score of 13. Based on
the scores alone, SVM is the most suited, however, it is to be noted that the choice of
algorithm depends upon the nature of data as well.
3.5 Summary
This chapter discusses the design methodology which includes methods for extracting
motor signals, and concept designs for the exoskeleton hand. The three methods for
extracting motor signals are examined based on the use of EEG, EOG, and EMG, after
which it is concluded that EMG is the most suitable for this project. Then three concept
designs are evaluated – a linkage-based mechanism, an elastic-bending-based mechanism,
and a Bowden-cable-based mechanism. The Bowden-cable-based mechanism is selected
by considering various factors on a Pugh chart. The mathematical model is developed
which gives us the critical force at the end, or the maximum force up till which the
Bowden cable will not buckle. To select the most suitable machine learning algorithm for
the device, a Pugh chart is used to compare various algorithms based on several factors
such as accuracy, ease of implementation, and computational complexity. The chart
concludes that Support Vector Machine (SVM) is the best algorithm for the device due to
33
its high accuracy, low computational complexity, and ease of implementation. Therefore,
SVM is selected as the machine learning algorithm for the finger movement device.
34
CHAPTER 4
PHYSICAL MODEL DEVELOPMENT & TESTING
35
the desired voltage to power all the electronics involved. A ready-made glove with a good
grip was used as the base on which the entire exoskeleton was built. Finally, a compact
shoulder bag with straps is used to securely carry the housing.
The housing carries all of the electronics, motor with rack and pinion assembled units,
and the Bowden cables secured at one end of the rack.
36
Fig 4-2: Housing
37
4.2.1 Sensor and Electronics:
Although we initially planned to use the Myo Armband for recording EMG signals, we
faced a setback because a brand-new model was unavailable. A second-hand model
was found, but it was missing a crucial USB receiver. Additionally, the discontinuation of this
armband by its manufacturer is another reason that led us to choose an alternative.
We chose a muscle sensor module compatible with Arduino to proceed with recording
EMG signals. Three such sensor modules are employed altogether to provide 3 channels
of measuring EMG for greater accuracy. It works on a dual power supply of ±9V, with
3.5V being its minimum working voltage.
38
The MG995 is a high speed and high torque motor that can provide precise rotation over
180°. It is a good option for this Bowden cable-based exoskeleton hand because of its
good performance, compact size and weight for a reasonable price. It uses the PWM input
signal of the Arduino board and operates within a voltage range of 4.8 -7.2 V.
39
The XL4015 is a Buck Converter used to step down a DC voltage of input range 5-36V to
an output range of 1.25-32V. The maximum voltage of the LiPo battery can go up to
8.4V, which is above the operating range of the MG995 servo motors. Hence, this buck
converter comes into play to step down the battery voltage within the operating range of
servo motors.
40
Fig 4-9: Wiring Diagram
41
To read the data, we first imported the PySerial module and created a serial object that
pointed to the appropriate COM port where the Arduino board was connected. We then
opened the serial port and used a while loop to continuously read the incoming data.
In this code, we first imported the PySerial and CSV modules to enable serial
communication between the Arduino board and our computer and to save the data in a
CSV file. We then created a serial object that points to the COM port where the Arduino
board is connected and opened the serial port.
The while loop is used to continuously read the incoming data from the EMG sensors. If
the serial object is waiting for incoming data, it reads a line from the serial buffer,
decodes it, and strips any leading or trailing whitespace characters. The EMG value is
then converted from a string to an integer and stored in a variable.
The elapsed time since the start of data collection is calculated using the time() function,
and the elapsed time and EMG value are then written to the CSV file using the
csv_writer.writerow() function.
It's worth noting that the delimiter used in the CSV file is a tab character \t, as specified in
the delimiter parameter of the csv.writer() function.
42
Overall, this code is a simple and effective way to collect and store data from EMG
sensors using an Arduino board and Python. By combining this code with data processing
and visualization techniques, we can gain valuable insights into muscle activity and
function for various applications.
Next, we read the CSV file containing the EMG data using Pandas. We then selected a
section of the data where there was activity by using the iloc() function of Pandas.
Specifically, we extracted the EMG values and the time values.
After extracting the relevant data, we used Matplotlib to plot the EMG values. The plotted
graph allowed us to visualize the muscle activity and function.
Finally, we plotted the entire file using the same process. These data visualization
techniques allowed us to explore and analyze the EMG data in a graphical format,
providing a better understanding of the muscle activity and function. This information can
be used in a wide range of applications, such as rehabilitation, sports performance, and
medical diagnosis.
43
Fig 4-13: Plotting the entire file – Python
44
Fig 4-15: EMG signal for Middle-finger extension
45
Fig 4-17: Active portion of EMG-signals for hand-extension
46
Skewness tells us whether a dataset is lopsided or symmetrical. It measures the extent and
direction of the imbalance in the data. If the tail of the distribution stretches more to the
right, it is positively skewed, while if it stretches more to the left, it is negatively skewed.
Variance gives us an idea of how spread out or concentrated the values in a dataset are. It
calculates the average of the squared differences between each data point and the mean. A
higher variance means the data points are more spread out, indicating greater variability,
while a lower variance implies the opposite.
Kurtosis describes the shape of a distribution by focusing on its tails and peak. It tells us
if the distribution is more or less peaked compared to a standard bell curve. Positive
kurtosis indicates a sharper peak and heavier tails, while negative kurtosis suggests a
flatter peak and lighter tails. A value of zero means the distribution has a similar shape to
a normal distribution.
Both RMS and MAV have been extensively used in EMG-based control systems,
particularly for prosthetic hand and limb control. This is because they are simple to
calculate, computationally efficient, and provide information about muscle activation
patterns that can be used to control prosthetic devices.
Furthermore, RMS and MAV have been shown to be relatively robust to variations in
electrode placement and muscle contraction force, making them particularly useful for
applications where electrode movement or changes in muscle activation patterns may
occur. In addition, variance, kurtosis, and skewness are also utilized in EMG-based
control systems. Variance provides information about the spread of EMG signal values,
while kurtosis and skewness offer insights into the shape and asymmetry of the signal
distribution. These measures can enhance the understanding of muscle activation patterns
and contribute to the development of more precise and adaptable prosthetic control
mechanisms.
For Mean Absolute Value (MAV):
The formula for calculating Mean Absolute Value (MAV) from a set of EMG signal
samples is:

$\mathrm{MAV} = \frac{1}{N}\sum_{i=1}^{N}\lvert x_i \rvert$ (4.1)

Where $N$ is the number of samples in the signal, $x_i$ is the $i$-th sample value, and $\sum$ denotes
the sum of the absolute values of all samples in the signal.
For Root Mean Square (RMS):
$\mathrm{RMS} = \sqrt{\frac{1}{N}\sum_{i=1}^{N} x_i^2}$ (4.2)

Where $N$ is the number of samples in the signal, $x_i$ is the $i$-th sample value, and $\sum$ denotes
the sum of the squared values of all samples in the signal.
For Skewness (S):
47
$S = \dfrac{\frac{1}{n}\sum_{i=1}^{n}(x_i-\mu)^3}{\sigma^3}$ (4.3)

Where $n$ is the number of data points in the dataset, $x_i$ represents each individual data
point, $\mu$ is the mean of the dataset, and $\sigma$ is the standard deviation of the dataset.
For Variance (Var):
$\mathrm{Var} = \frac{1}{n}\sum_{i=1}^{n}(x_i-\mu)^2$ (4.4)
For Kurtosis (K):
$K = \dfrac{\frac{1}{n}\sum_{i=1}^{n}(x_i-\mu)^4}{\sigma^4}$ (4.5)
The following code was utilized to calculate certain features:
48
variable 'act' is assigned a string value referring to the ‘action’ performed for which the
feature is being extracted.
The next line calculates the rectified signal by taking the absolute value of the selected
EMG data. The root mean square (RMS) of the signal is then computed using NumPy’s
sqrt and mean functions. Similarly, the mean absolute value (MAV) of the signal is
computed using NumPy’s mean function.
A pandas DataFrame is created to store the computed features (action, MAV, and RMS).
This DataFrame is initially empty. The 'Action', 'MAV', and 'RMS' columns are inserted
into the DataFrame.
The following if-else statement determines whether to reset the feature DataFrame or to
append the new feature values to the existing feature DataFrame. The feature DataFrame
is saved as a CSV file. The plot function is then called to plot the EMG signal against its
respective time values. If one wishes to reset the file, then simply replacing the 0 in the if
statement will reset the file and add the new row.
Overall, the code processes EMG data, computes features (RMS and MAV), and saves
the features to a CSV file. It also creates a plot of the EMG signal.
Lastly, some entries of the DataFrame are shown:
49
Fig 4-20: Visualizing Data using Seaborn – Python
The first line imports the necessary libraries for the code to run. The 'pandas' library is
used for working with data in a tabular form, 'seaborn' is used for data visualization, and
'train_test_split' is used for splitting data into training and testing sets.
The second line reads in a CSV file named 'New_ft.csv' that contains the features of the
exoskeleton hand data and assigns it to a variable called 'ftrs'.
The third and fourth lines slice the features data into X (the independent variables) and y
(the dependent variable).
The fifth line creates a scatter plot using the 'lmplot' function from Seaborn library. The
'x' and 'y' parameters specify the variables to plot on the x-axis and y-axis, respectively.
The 'data' parameter specifies the data to use, which is the 'X' DataFrame. The 'hue'
parameter specifies the column in the DataFrame that should be used to color the points,
which is the 'Action' column. The 'palette' parameter specifies the color palette to use,
which is the 'Set1' palette. The 'fit_reg' parameter is set to False to remove the regression
line from the plot.
This code generates a scatter plot of the 'RMS' and 'MAV' features from the exoskeleton
hand data, with the points colored according to the 'Action' column. The plot can be used
to visualize the relationship between the 'RMS' and 'MAV' features and the different
actions performed by the exoskeleton hand.
50
Fig 4-21: Scatter Plot of the Features
It can be clearly seen that the data is linearly separable because there is only a very small
overlap.
51
In this piece of code, we used the Scikit-learn library in Python to perform a Support
Vector Machine (SVM) classification on our EMG feature dataset.
First, we loaded our extracted feature data from the 'New_ft.csv' file using the Pandas
read_csv function. We then split our feature data into training and testing data using
Scikit-learn's train_test_split function. We set aside 30% of the data for testing, while the
remaining 70% is used for training our SVM classifier.
Next, we created an SVM classifier object using Scikit-learn's SVC function with the
linear kernel. We then fit the classifier to our training data using the fit method.
To evaluate the accuracy of our SVM classifier, we used the score method with the
testing data as input. This calculated the accuracy of our classifier on the testing data and
returned a value between 0 and 1. Additionally, we printed out the predicted values for
the testing data using the predict method.
Since there are multiple classes, the algorithm by default uses the one-vs-one (OVO)
method to distinguish between them. Note that OVO is more robust than one-vs-all
(OVA) (see Appendix A).
52
Fig 4-23: Electrode Setup
The setup allowed us to record and process EMG signals from the extensor digitorum
muscle group in real-time, which could be used to control external devices or provide
feedback for rehabilitation purposes.
53
Fig 4-24: Arduino Code for real-time prediction
54
4.4 Summary
This chapter dived into details of all the designed and ready-made parts involved in
fabrication, specifications of the electronics used and why they were deemed suitable.
Moreover, the data acquisition procedure is explored which constitutes reading, storing
and visualizing of raw data, which is later used to extract the relevant data to perform
hand movements. The EMG data is collected using sensors connected to the Arduino
board and transferred to a computer using serial communication. The data is then stored
in a CSV file using Python and visualized using Pandas and Matplotlib libraries. Feature
extraction techniques such as Root Mean Square (RMS) and Mean Absolute Value
(MAV) are used to identify relevant features from the EMG signal for machine learning
algorithms. The python code is explained in detail for each step of the process, including
setting up the sensors, reading and sending the data, reading and storing the data in
Python, visualizing the data, and feature extraction.
55
CHAPTER 5
RESULTS & DISCUSSION
5.1 Results
The code used in the project is designed to make real-time predictions based on the EMG
signals collected from the sensors. The Arduino code receives the coefficients and
intercepts calculated during the SVM training process in Python and uses them to make
predictions in real-time.
The predict() function is responsible for making the actual prediction. It takes in two
parameters, x1 and x2, which represent the mean absolute value (MAV) and root mean
square (RMS) of the EMG signal, respectively. The function uses the coefficients and
intercepts received from the Python code to calculate the decision values for each of the
three possible classes. The class with the highest decision value is then chosen as the
predicted class.
The main loop of the Arduino code reads in the EMG signals from the sensor and
calculates the MAV and RMS values using the collected data. These values are then
passed to the predict() function to make the actual prediction. The predicted class is then
sent to the serial monitor for real-time monitoring and analysis.
The EMG signal is only considered valid if the analogRead(A0) value is greater than 15.
If the signal is valid, the program will execute the necessary steps to calculate the MAV
and RMS values, make a prediction, and output the results to the serial monitor.
Otherwise, the program will skip the current iteration of the loop and wait for the next
valid signal to be received.
Here’s what the output window looked like:
56
Table 5-4: Prediction Accuracy of Signals
Correct Incorrect Prediction
Class Total instances
predictions predictions accuracy
Hand_Ext_a 5 4 1 80.00%
Index_ext_r 5 5 0 100.00%
Middle_ext_a 5 5 0 100.00%
Overall 15 14 1 93.33%
5.3 Summary
This chapter presents the results and analysis of a project that uses EMG signals to
classify hand movements through the implementation of a Support Vector Machine
(SVM) algorithm. The code is designed to make real-time predictions based on the EMG
signals collected from the sensors. The results suggest that the EMG signals can be
effectively used to classify hand movements, and the SVM model can accurately predict
the intended movements from these signals. The results showed that the SVM model can
accurately predict intended movements from these signals, with an overall prediction
accuracy of 93.33%. The potential applications of this technology in prosthetics and
rehabilitation are also discussed.
57
CHAPTER 6
IMPACT AND ECONOMIC ANALYSIS
58
Addressing Limitations: The limitations of the hand exoskeleton, such as lack of
wrist and arm control, less precision in position control of fingers, and no haptic
feedback, highlight areas for future improvement and research. Addressing these
limitations can lead to the development of more advanced and effective assistive
technologies, which can have a significant social impact by improving the
functionality and usability of the exoskeleton, and ultimately benefiting patients in
their rehabilitation journey.
In conclusion, the hand exoskeleton designed for assistive grasping in rehabilitation has
the potential to bring about positive social impact by improving rehabilitation outcomes,
increasing independence, enhancing access to employment opportunities, promoting
inclusive design, driving technological advancements, raising awareness and education,
and addressing limitations for future improvements.
59
increased productivity, employment opportunities, and participation in the workforce,
leading to social and economic empowerment. Additionally, the utilization of 3D printing
for manufacturing the hand module and housing may have positive environmental
impacts by reducing material waste and energy consumption compared to traditional
manufacturing methods. The 3D printed parts are made up of PLA (Polylactic Acid) and
ABS (Acrylonitrile Butadiene Styrene). PLA is derived from renewable resources which
reduces the environmental impact compared to traditional plastics. ABS, on the other
hand, is not biodegradable and requires petroleum-based raw materials for its production,
which has a higher environmental footprint. However, PLA and ABS can be recycled,
reducing waste generation, and promoting circular economy principles.
60
Acrylonitrile Butadiene Styrene (ABS) is a type of thermoplastic polymer that is
commonly used in a wide range of applications, including automotive parts, consumer
goods, electronics, toys, and more. Like many other plastics, ABS has environmental
impacts throughout its lifecycle, including its production, use, and disposal. This material
is present in the hand exoskeleton as the outer casing of the servomotor. Some of the
environmental impacts associated with ABS plastic are:
Resource depletion: ABS is derived from petrochemicals, which are non-renewable
resources. The extraction and processing of these fossil fuels can result in habitat
destruction, air and water pollution, and contribute to climate change through
greenhouse gas emissions.
Energy consumption: The production of ABS requires a significant amount of
energy, primarily in the form of heat for polymerization and molding processes. The
energy used in ABS production is typically derived from fossil fuels, which can result
in greenhouse gas emissions and contribute to climate change.
Greenhouse gas emissions: ABS production involves the release of greenhouse
gases, such as carbon dioxide, methane, and nitrous oxide, which are known to
contribute to climate change. Additionally, ABS products can release greenhouse
gases when they degrade or incinerate at the end of their life cycle.
Water and air pollution: The production of ABS can result in the release of
pollutants into the air and water, including volatile organic compounds (VOCs),
particulate matter, and other hazardous chemicals. These pollutants can have negative
impacts on local air and water quality, as well as on human and ecosystem health.
Waste generation: ABS plastic products can contribute to solid waste generation
when they reach the end of their useful life and are disposed of in landfills. ABS is not
biodegradable and can persist in the environment for a long time, leading to
accumulation in landfills and potential impacts on wildlife and ecosystems.
Recycling challenges: While ABS is technically recyclable, it can be challenging to
recycle due to its complex composition and the need for specialized facilities and
processes. As a result, a significant portion of ABS plastic products may end up in
landfills or incinerators, contributing to waste and environmental pollution.
It's important to note that the environmental impact of ABS plastic can vary depending on
factors such as its production methods, use patterns, and waste management practices.
Efforts to reduce the environmental impact of ABS plastic include improving the
efficiency of production processes, increasing the use of renewable energy in production,
promoting recycling and waste reduction, and developing more sustainable alternatives to
ABS plastic.
6.3.1.2 PLA
Polylactic acid (PLA) is a type of biodegradable plastic that is commonly used in 3D
printing. PLA is derived from renewable resources, such as corn starch or sugarcane, and
is considered to be more environmentally friendly compared to traditional petroleum-
based plastics. In the hand exoskeleton, PLA has been used to manufacture all the 3D
printed parts which include hand module, housing, and rack and pinion. However, like
61
any material, PLA also has environmental impacts that need to be considered. Here are
some key environmental impacts associated with PLA:
Resource consumption: PLA is made from renewable resources, but the production
of PLA requires land, water, and energy for growing crops, processing raw materials,
and manufacturing the plastic. Depending on the agricultural practices used in crop
production, there may be impacts on soil health, water use, and biodiversity.
Energy consumption: PLA production requires energy for processing and
manufacturing, which may come from fossil fuels or renewable sources. Energy
consumption during PLA production contributes to greenhouse gas emissions and
climate change, depending on the energy source.
Emissions: PLA production can release greenhouse gases, such as carbon dioxide and
methane, during the cultivation of crops, processing of raw materials, and
manufacturing of the plastic. However, PLA generally has lower carbon emissions
compared to traditional petroleum-based plastics, as it is derived from renewable
resources.
Biodegradability and compostability: PLA is often marketed as biodegradable and
compostable, which means it can break down into natural materials when properly
managed in industrial composting facilities. However, PLA may not biodegrade or
compost in natural environments, such as landfills or marine environments, where
conditions may not be conducive to its decomposition, and improper disposal of PLA
can still contribute to pollution.
Chemical additives: PLA can sometimes contain chemical additives, such as
colorants or flame retardants, which may have environmental impacts during
production, use, and disposal. It's important to choose PLA materials that are free
from harmful additives to minimize potential environmental impacts.
Waste generation: 3D printing with PLA can generate waste in the form of failed or
rejected prints, supports, and other discarded materials. Proper management and
disposal of PLA waste are important to prevent environmental pollution and
contribute to a circular economy.
Recycling challenges: While PLA is technically recyclable, it can be challenging to
recycle PLA materials due to potential contamination from other materials used in 3D
printing, as well as the lack of widespread infrastructure for PLA recycling. This can
result in lower recycling rates and increased waste generation.
To mitigate the environmental impacts of PLA, it's important to properly manage PLA
waste through appropriate disposal methods, such as composting or recycling, when
possible. Choosing PLA materials that are made from renewable resources, produced
with minimal energy and water use, and free from harmful additives can also help reduce
the environmental footprint of PLA. Additionally, optimizing 3D printing settings to
minimize material waste and considering the end-of-life disposal options for PLA prints
can contribute to more sustainable 3D printing practices using PLA materials.
62
3D printing, also known as additive manufacturing, is a rapidly growing technology that
has the potential to revolutionize manufacturing in various industries. While 3D printing
offers many benefits, such as increased design flexibility, reduced waste, and streamlined
production, it also has environmental impacts that need to be considered. Here are some
of the key environmental impacts associated with 3D printing:
Material waste: 3D printing can generate material waste during the printing process,
including supports, rafts, and failed or rejected prints. This can contribute to solid
waste generation and potential environmental pollution if not properly managed. In
some cases, 3D printing may also require post-processing steps that generate
additional waste, such as sanding or polishing.
Energy consumption: 3D printing requires energy to power the printers, heat the
printing material, and sometimes run post-processing steps. The energy used in 3D
printing can come from various sources, including fossil fuels or renewable energy,
and can contribute to greenhouse gas emissions and climate change, depending on the
energy source.
Emissions: Depending on the type of 3D printing technology and materials used, 3D
printing can release emissions into the air during the printing process. For example,
some 3D printers that use plastics can release volatile organic compounds (VOCs) and
other potentially hazardous fumes. These emissions can contribute to indoor or
outdoor air pollution, posing potential risks to human health and the environment.
Resource consumption: 3D printing materials, such as filaments, resins, or powders,
are typically derived from various raw materials, including plastics, metals, and
ceramics, which may require extraction and processing of natural resources. This can
result in habitat destruction, air and water pollution, and contribute to climate change
through greenhouse gas emissions.
Recycling challenges: Some 3D printing materials, such as certain plastics or metal
powders, can be challenging to recycle due to potential contamination from supports,
adhesives, or other materials used during the printing process. This can result in lower
recycling rates and increased waste generation, adding to environmental impacts.
Transportation and logistics: The production, distribution, and shipment of 3D
printing materials and equipment can involve transportation and logistics, including
shipping, packaging, and storage. These activities can contribute to energy
consumption, greenhouse gas emissions, and waste generation associated with
transportation and logistics.
Design and material choices: The design and material choices made in 3D printing
can also impact the environmental footprint. For example, using large amounts of
materials or choosing materials with high environmental impacts, such as certain
metals or plastics, can exacerbate the environmental impacts of 3D printing.
To attenuate the environmental impacts of 3D printing, some measures can be taken, such
as optimizing print settings to reduce material waste, using energy-efficient printers,
selecting materials with lower environmental impacts, properly managing and disposing
of waste, exploring recycling options for used materials, and considering life cycle
assessments when making design and material choices. Additionally, using renewable
energy sources for powering 3D printers and improving the recycling and circularity of
63
3D printing materials can help reduce the environmental footprint of 3D printing
technology.
64
from the EMG sensors also demonstrates
technological advancements. Additionally, the
design of the hand module and housing, with a
distribution of weight, showcases infrastructure
development for efficient and effective
rehabilitation devices.
10. Reduced Inequalities The hand exoskeleton aims to reduce
inequalities in access to rehabilitation services
by providing an affordable and accessible
solution for individuals with hand motor
deficits and ensuring that the technology is
accessible to a wider population, including
those in resource-constrained settings. By
compensating for the deficit force of the hand,
it can help bridge the gap in functional abilities
and improve equality in rehabilitation
opportunities.
11. Sustainable Cities and The hand exoskeleton is designed to be
Communities lightweight and efficient, with a minimized
number of components in the hand module,
and heavy equipment distributed in the
housing. This design approach can contribute
to sustainable urban development by reducing
material usage, energy consumption and
promoting sustainable manufacturing and
transportation practices.
12. Responsible Consumption The design of the hand exoskeleton focuses on
and Production lightweight and minimal components, which
can help reduce resource consumption and
waste generation during manufacturing and
use. Additionally, the use of Arduino Uno for
processing sensory input from the EMG
sensors demonstrates the use of open-source
and reusable technology, promoting
responsible consumption and production
practices.
13. Climate Action N/A
14. Life Below Water N/A
15. Life on Land N/A
16. Peace, Justice and Strong N/A
Institutions
17. Partnerships The development of the hand exoskeleton
65
involves collaboration between different
disciplines, such as engineering, rehabilitation,
healthcare sciences and machine learning. This
interdisciplinary approach showcases the
importance of partnerships and collaboration in
achieving sustainable development goals and
addressing complex challenges.
6.5 Summary
The hand exoskeleton designed for assistive grasping in rehabilitation has significant
social impact potential, improving rehabilitation outcomes, increasing independence, and
enhancing access to employment opportunities. Its inclusive design promotes inclusivity
and diversity, and technological advancements drive innovation. The project aims for
economic sustainability by using locally available components, energy-efficient actuators,
and reusable systems, reducing production costs. Impact sustainability is achieved
through improved quality of life, increased productivity, and participation in the
workforce. The use of 3D printing with PLA, a renewable resource, reduces material
waste and energy consumption, promoting environmental sustainability.
However, ABS plastic, used in the servomotor casing, has environmental impacts such as
resource depletion, energy consumption, and emissions. ABS production causes
pollution, waste generation, and recycling challenges. PLA, used in 3D printed parts, is
considered more environmentally friendly, but still has impacts such as biodegradability
challenges and waste generation. Proper waste management, choosing renewable PLA
materials, optimizing printing settings, and using sustainable production methods can
mitigate environmental impacts.
In conclusion, the hand exoskeleton has social impact potential and aligns with
sustainability goals. Efforts should be made to reduce environmental impacts of ABS and
PLA through sustainable production, renewable energy use, waste reduction, proper
waste management, and material choice. Optimizing 3D printing settings can also
contribute to sustainable practices.
66
CHAPTER 7
CONCLUSION AND FUTURE RECOMMENDATIONS
7.1 Conclusions
The development of hand exoskeletons has the potential to revolutionize rehabilitation for
individuals with hand injuries. The aim of this project was to design and develop a
wearable hand exoskeleton rehabilitation device that would assist individuals with hand
disabilities to perform necessary hand functions based on signals coming from the nerves.
The main focus is maintained on creating a simple, lightweight, and cost-effective model
that does not severely limit the movement capability of individual fingers while also
achieving high accuracy of classification and precise control of the motors. The project
utilized 3 channels of EMG sensor to obtain nerve signals, which were then processed by
a machine learning classification algorithm, Support Vector Machine (SVM) along with
various Python libraries, to recognize the movement. The study found that the designed
model was effective in aiding individuals to perform accurate hand movements.
Additionally, the project's sustainability, compliance with SDGs, social impact on users,
and environmental impact of processes were all analyzed, yielding a positive outlook.
Overall, this project strongly demonstrates the potential of wearable hand exoskeleton
rehabilitation devices in improving hand function and helping individuals regain their
independence, with further research and development leading to more advanced models
that can be widely used in the future. Because a disabled patient was not available for the
demonstration, the sensors capturing the EMG signals were worn by one person, while the
exoskeleton hand was worn by another. The exoskeleton was successfully controlled by
the person wearing the sensors, and the corresponding movements were produced on the
person wearing the exoskeleton.
67
References
[1] Disability statistics. Pakistan Bureau of Statistics. (n.d.). Retrieved September 7,
2022, from https://www.pbs.gov.pk/content/disability-statistics [W].
[2] Feedforward Friction Compensation of Bowden-Cable Transmission Via Loop
Routing Useok Jeong, and Kyu-Jin Cho, Member, IEEE [J].
[3] Franziska Ryser, Tobias Butzer et al. (2017), Fully Embedded Myoelectric Control
for a Wearable Robotic Hand Orthosis [J].
[4] Habibi, E., Kazemi, M., Dehghan, H., Mahaki, B., & Hassanzadeh, A. (2013). Hand
grip and pinch strength: Effects of workload, hand dominance, age, and Body Mass
Index [J].
[5] Hong Kai et al. (2015), A Soft Exoskeleton for Hand Assistive and Rehabilitation
Application using Pneumatic Actuators with Variable Stiffness [J].
[6] Luca Randazzo et al. (2018), mano: A Wearable Hand Exoskeleton for Activities of
Daily Living and Neurorehabilitation [J].
[7] Phinyomark, A., Phukpattaranont, P., & Limsakul, C. (2012). Feature reduction and
selection for EMG signal classification. Expert Systems with Applications [J].
[8] Tam et al. BMC (2019), Human motor decoding from neural Signals [J].
[9] Tobias Butzer et al. (2020), Fully Wearable Actuated Soft Exoskeleton for Grasping
Assistance in Everyday Activities (SOFT ROBOTICS Volume 00, Number 00,
2020) [J].
[J] = Journal Article [B] = Book/Manual [D] = Dissertation/Thesis [R] = Report [C] = Conference
proceeding [W] = Webpage
68
Appendices
69
Appendix B (Python Code)
Only Data Acquisition
# Data acquisition: stream EMG readings from the Arduino over a serial port
# and log each reading as (elapsed time, value) to a tab-delimited CSV file.
# NOTE(review): requires the third-party 'pyserial' package (import serial)
# plus the stdlib csv and time modules, imported earlier in the listing.
ser = serial.Serial('COM3', 9600)  # 'COM3' is the serial port the Arduino is on
csv_file = open('hand_ext_a.csv', 'w', newline='')
csv_writer = csv.writer(csv_file, delimiter='\t')  # tab delimiter, as stated in the text
start_time = time.time()  # epoch seconds; reference point for elapsed time
while True:
    if ser.in_waiting > 0:
        line = ser.readline().decode().strip()
        emg_value = int(line)
        # Fix: compute the elapsed time before logging it — the original
        # listing used 'elapsed_time' without ever assigning it (the text at
        # Section 4 says it is derived from time() relative to start_time).
        elapsed_time = time.time() - start_time
        csv_writer.writerow([elapsed_time, emg_value])
# --- Feature-table and buffer setup for the three EMG channels -----------
# One placeholder DataFrame per channel with columns Action / MAV / Var /
# Skew / Kurt (the RMS column is commented out in this version), plus one
# raw-sample buffer (k1..k3) per channel.
n=0
print("Get Ready!")
# Channel 1 feature table; row of zeros is a placeholder.
feature_df1 = pd.DataFrame()
feature_df1.insert(0, 'Action',[0])
feature_df1.insert(1, 'MAV',[0])
#feature_df.insert(1, 'RMS',[0])
feature_df1.insert(2, 'Var', [0])
feature_df1.insert(3, 'Skew', [0])
feature_df1.insert(4, 'Kurt', [0])
# Channel 2 feature table.
feature_df2 = pd.DataFrame()
# NOTE(review): the bare "70" below is a page-number artifact from the
# document this listing was extracted from, not part of the program.
70
feature_df2.insert(0, 'Action',[0])
feature_df2.insert(1, 'MAV',[0])
#feature_df.insert(1, 'RMS',[0])
feature_df2.insert(2, 'Var', [0])
feature_df2.insert(3, 'Skew', [0])
feature_df2.insert(4, 'Kurt', [0])
# Channel 3 feature table.
feature_df3 = pd.DataFrame()
feature_df3.insert(0, 'Action',[0])
feature_df3.insert(1, 'MAV',[0])
#feature_df.insert(1, 'RMS',[0])
feature_df3.insert(2, 'Var', [0])
feature_df3.insert(3, 'Skew', [0])
feature_df3.insert(4, 'Kurt', [0])
# Raw-sample buffers, one per channel; the first column is unused (named
# 'does not matter'), the 'value' column accumulates EMG readings.
k1 = pd.DataFrame()
k1.insert(0, 'does not matter', [0])
k1.insert(1, 'value', [0])
k2 = pd.DataFrame()
k2.insert(0, 'does not matter', [0])
k2.insert(1, 'value', [0])
k3 = pd.DataFrame()
k3.insert(0, 'does not matter', [0])
k3.insert(1, 'value', [0])
# Actions to record, and a 3-slot scratch list for the parsed channel values.
act = ['ext','grasp']
rec = [0,0,0]
print(len(act))
# --- Recording loop ------------------------------------------------------
# For every action in 'act', run 3 recording repetitions: count down 3-2-1,
# then read 1000 comma-separated lines from the serial port, split each line
# into the three channel values, and append them to the k1/k2/k3 buffers.
# NOTE(review): Python indentation was lost when this listing was extracted
# from the document, and the initializations of 'c' (channel index) and
# 'strip' (digit accumulator) appear to have been dropped at the page break —
# confirm against the original figure before reuse.
for i in range(len(act)):
for l in range(3):
# Countdown so the subject can prepare before the recording starts.
print("1")
time.sleep(1)
print("2")
time.sleep(1)
print("3")
time.sleep(1)
print("RECORDING")
# NOTE(review): "71" below is a page-number artifact, not program code.
71
for j in range(1000):
line = ser.readline().decode().strip()
# Manual CSV split: walk the line; a comma ends the current number,
# which is stored into rec[c]; other characters extend 'strip'.
for h in range(len(line)):
if line[h] == ',':
rec[c] = int(strip)
c=c+1
strip = ''
else:
strip = strip + line[h]
# One parsed EMG value per channel for this sample.
emg_value1 = rec[0]
emg_value2 = rec[1]
emg_value3 = rec[2]
# Append each channel's value to its buffer (DataFrame.append — removed
# in pandas 2.x; presumably written against pandas 1.x. TODO confirm).
y1 = pd.DataFrame()
y1.insert(0, 'does not matter', [0])
y1.insert(1, 'value', [emg_value1])
k1 = k1.append(y1, ignore_index=True);
y2 = pd.DataFrame()
y2.insert(0, 'does not matter', [0])
y2.insert(1, 'value', [emg_value2])
k2 = k2.append(y2, ignore_index=True);
y3 = pd.DataFrame()
y3.insert(0, 'does not matter', [0])
y3.insert(1, 'value', [emg_value3])
k3 = k3.append(y3, ignore_index=True);
# Leftover timestamp-logging lines from the single-channel script,
# intentionally disabled here.
#elapsed_time = time.time() - start_time
#csv_writer.writerow([elapsed_time, emg_value])
#csv_file.close()
72
# --- Per-channel feature extraction for the training set ---
# Computes MAV, variance, skewness and kurtosis of each channel's
# (MAV-normalized) signal and packs them into one labelled row per channel.
# NOTE(review): `rectified_signal`, `emg_signal1..3`, and the channel-2/3
# statistics (mav2, var2, ..., kurt3) are not defined anywhere in this
# excerpt -- those lines were presumably lost at the PDF page breaks;
# confirm against the original script.
mav1 = np.mean(rectified_signal)
var1 = (emg_signal1/mav1).var()
skew1 = (emg_signal1/mav1).skew()
kurt1 = (emg_signal1/mav1).kurtosis()
# One labelled feature row per channel; act[i] is the gesture being recorded.
val1 = pd.DataFrame()
val1.insert(0, 'Action',[act[i]])
val1.insert(1, 'MAV',mav1)
#val1.insert(2, 'RMS',rms)
val1.insert(2, 'Var', var1)
val1.insert(3, 'Skew', skew1)
val1.insert(4, 'Kurt', kurt1)
val2 = pd.DataFrame()
val2.insert(0, 'Action',[act[i]])
val2.insert(1, 'MAV',mav2)
#val1.insert(2, 'RMS',rms)
val2.insert(2, 'Var', var2)
val2.insert(3, 'Skew', skew2)
val2.insert(4, 'Kurt', kurt2)
val3 = pd.DataFrame()
val3.insert(0, 'Action',[act[i]])
val3.insert(1, 'MAV',mav3)
#val1.insert(2, 'RMS',rms)
val3.insert(2, 'Var', var3)
val3.insert(3, 'Skew', skew3)
# (stray "73" below is a page number from the PDF extraction)
73
val3.insert(4, 'Kurt', kurt3)
print('Success')
# Output files for the per-channel training feature tables.
file_name1 = 'if1.csv'
file_name2 = 'if2.csv'
file_name3 = 'if3.csv'
# --- Majority-vote tail (Python counterpart of the Arduino vote()) ---
# NOTE(review): the enclosing `def` header is missing from this excerpt
# (lost in PDF extraction) -- the bare `return` below only makes sense
# inside a function. a/b/c are the per-classifier predicted labels and
# ctr tallies votes per action; the first action with >= 2 of 3 votes
# wins, defaulting to index 0 when no majority is reached.
for i in range(len(act)):
if a[0] == act[i]:
ctr[i] = ctr[i] + 1
if b[0] == act[i]:
ctr[i] = ctr[i] + 1
if c[0] == act[i]:
ctr[i] = ctr[i] + 1
x=0
for i in range(len(ctr)):
# (stray "74" below is a page number from the PDF extraction)
74
if ctr[i] >= 2:
x=i
break
return x
# Train one linear-kernel SVM per EMG channel. In every feature table the
# first column holds the action label; the remaining columns hold the
# features (MAV, Var, Skew, Kurt).
def _split_features(table):
    """Return (X, y): the feature columns and the label column of `table`."""
    return table.iloc[:, 1:], table.iloc[:, 0]

X_train1, y_train1 = _split_features(ftrs1)
X_train2, y_train2 = _split_features(ftrs2)
X_train3, y_train3 = _split_features(ftrs3)

clf1 = svm.SVC(kernel='linear')
clf1.fit(X_train1, y_train1)
clf2 = svm.SVC(kernel='linear')
clf2.fit(X_train2, y_train2)
clf3 = svm.SVC(kernel='linear')
clf3.fit(X_train3, y_train3)
# --- Real-time inference loop (original indentation lost in PDF extraction) ---
# Each iteration rebuilds empty per-channel feature tables and sample
# accumulators, reads samples from the serial port, and appends the parsed
# channel values to k1/k2/k3 for feature extraction below.
while True:
feature_df1 = pd.DataFrame()
feature_df1.insert(0, 'Action',[0])
feature_df1.insert(1, 'MAV',[0])
#feature_df.insert(1, 'RMS',[0])
feature_df1.insert(2, 'Var', [0])
feature_df1.insert(3, 'Skew', [0])
feature_df1.insert(4, 'Kurt', [0])
# (stray "75" below is a page number from the PDF extraction)
75
feature_df2 = pd.DataFrame()
feature_df2.insert(0, 'Action',[0])
feature_df2.insert(1, 'MAV',[0])
#feature_df.insert(1, 'RMS',[0])
feature_df2.insert(2, 'Var', [0])
feature_df2.insert(3, 'Skew', [0])
feature_df2.insert(4, 'Kurt', [0])
feature_df3 = pd.DataFrame()
feature_df3.insert(0, 'Action',[0])
feature_df3.insert(1, 'MAV',[0])
#feature_df.insert(1, 'RMS',[0])
feature_df3.insert(2, 'Var', [0])
feature_df3.insert(3, 'Skew', [0])
feature_df3.insert(4, 'Kurt', [0])
k1 = pd.DataFrame()
k1.insert(0, 'does not matter', [0])
k1.insert(1, 'value', [0])
k2 = pd.DataFrame()
k2.insert(0, 'does not matter', [0])
k2.insert(1, 'value', [0])
k3 = pd.DataFrame()
k3.insert(0, 'does not matter', [0])
k3.insert(1, 'value', [0])
print("hi")
rec = [0,0,0]
# NOTE(review): the next line is duplicated verbatim -- harmless, but
# likely an artifact of the extraction or an editing slip.
start_time = time.time() # get the current time in seconds since the epoch
start_time = time.time() # get the current time in seconds since the epoch
line = ser.readline().decode().strip()
# NOTE(review): the `for h in range(len(line)):` header (and the
# initialization of `c`/`strip`) that should precede this comma-split is
# missing from the excerpt -- presumably lost at the page break below.
if line[h] == ',':
# (stray "76" below is a page number from the PDF extraction)
76
rec[c] = int(strip)
c=c+1
strip = ''
else:
strip = strip + line[h]
emg_value1 = rec[0]
emg_value2 = rec[1]
emg_value3 = rec[2]
# Append one single-row frame per channel to the accumulators.
# NOTE(review): DataFrame.append was removed in pandas 2.0; use pd.concat.
y1 = pd.DataFrame()
y1.insert(0, 'does not matter', [0])
y1.insert(1, 'value', [emg_value1])
k1 = k1.append(y1, ignore_index=True);
y2 = pd.DataFrame()
y2.insert(0, 'does not matter', [0])
y2.insert(1, 'value', [emg_value2])
k2 = k2.append(y2, ignore_index=True);
y3 = pd.DataFrame()
y3.insert(0, 'does not matter', [0])
y3.insert(1, 'value', [emg_value3])
k3 = k3.append(y3, ignore_index=True);
print("hehe")
n=1
# (stray "77" below is a page number from the PDF extraction)
77
# --- Inference-time feature extraction and per-channel prediction ---
# NOTE(review): only the channel-3 statistics appear in this excerpt;
# `rectified_signal`, `emg_signal3`, and the channel-1/2 values
# (mav1, var1, ..., kurt2) must come from lines lost at the page breaks.
rms = np.sqrt(np.mean(rectified_signal**2))
# Compute the mean absolute value (MAV) of the signal
mav3 = np.mean(rectified_signal)
var3 = (emg_signal3/mav3).var()
skew3 = (emg_signal3/mav3).skew()
kurt3 = (emg_signal3/mav3).kurtosis()
# One feature row per channel (Action column is a placeholder 0 here).
val1 = pd.DataFrame()
val1.insert(0, 'Action',[0])
val1.insert(1, 'MAV',mav1)
#val1.insert(2, 'RMS',rms)
val1.insert(2, 'Var', var1)
val1.insert(3, 'Skew', skew1)
val1.insert(4, 'Kurt', kurt1)
val2 = pd.DataFrame()
val2.insert(0, 'Action',[0])
val2.insert(1, 'MAV',mav2)
#val1.insert(2, 'RMS',rms)
val2.insert(2, 'Var', var2)
val2.insert(3, 'Skew', skew2)
val2.insert(4, 'Kurt', kurt2)
val3 = pd.DataFrame()
val3.insert(0, 'Action',[0])
val3.insert(1, 'MAV',mav3)
#val1.insert(2, 'RMS',rms)
val3.insert(2, 'Var', var3)
val3.insert(3, 'Skew', skew3)
val3.insert(4, 'Kurt', kurt3)
# Run the three per-channel classifiers; row 0 of each feature table is
# the all-zero seed row, so prediction starts from row 1.
a = clf1.predict(feature_df1.iloc[1:,1:])
b = clf2.predict(feature_df2.iloc[1:,1:])
c = clf3.predict(feature_df3.iloc[1:,1:])
# NOTE(review): `c` is reused here as the prediction array although the
# parsing code above uses `c` as a column counter -- verify intent.
act = ['ext','grasp']
ctr = [0,0]
# (stray "78" below is a page number from the PDF extraction)
78
Appendix C (Arduino Code)
#include <Servo.h>
// Linear-SVM parameters hard-coded on the microcontroller: coef[k] holds
// the four feature weights (order matching the features computed in
// loop(): MAV, Var, Skew, Kurt) and intercept[k] the bias for decision
// value k.
// NOTE(review): presumably exported from the scikit-learn model trained
// in the Python script -- confirm provenance and feature ordering.
double coef[3][4] = {{-0.83141334, -0.00376557, -0.02959438, -0.16066591},
{-0.17895769, -0.00364335, -0.02300627, -0.15123637},
{0.20783653, -0.03972082, -0.2027858, -1.34818274}};
double intercept[3] = {109.10151124, 24.19844435, -22.71124905};
// Majority vote across the three per-channel classifier outputs.
// Every prediction (a, b, c) that matches act[i] adds one tally to
// ctr[i]; the first action reaching a two-of-three majority wins.
// Returns the winning index, or 0 when no action has >= 2 votes.
// Note: ctr is accumulated on top of its incoming contents.
int vote(String a, String b, String c, String* act, int* ctr, int size) {
  // Tally how many of the three predictions name each action.
  for (int i = 0; i < size; ++i) {
    int tally = ctr[i];
    if (a == act[i]) { ++tally; }
    if (b == act[i]) { ++tally; }
    if (c == act[i]) { ++tally; }
    ctr[i] = tally;
  }

  // First action with a majority wins; fall back to index 0 otherwise.
  int winner = 0;
  for (int i = 0; i < size; ++i) {
    if (ctr[i] >= 2) {
      winner = i;
      break;
    }
  }
  return winner;
}
79
// --- Tail of the on-device predict() (function header lost at the PDF
// page break) ---
// Adds the bias to each class's decision value, then returns the index
// of the largest of the three decision values (argmax).
decision_values[i] += intercept[i];
}
double max_value = decision_values[0];
int max_index = 0;
for (int i = 1; i < 3; i++) {
if (decision_values[i] > max_value) {
max_value = decision_values[i];
max_index = i;
}
}
return max_index;
}
// (stray "80" below is a page number from the PDF extraction; the lone
// "}" after it has no matching opening brace in this excerpt)
80
}
int n = NUM_SAMPLES; // samples per analysis window (NUM_SAMPLES is defined outside this excerpt)
// One-time hardware bring-up: open the serial link at 9600 baud, then
// attach each finger servo to its PWM pin and park it at 0 degrees.
void setup() {
  Serial.begin(9600);

  thumbServo.attach(3);
  thumbServo.write(0);

  indexServo.attach(5);
  indexServo.write(0);

  middleServo.attach(9);
  middleServo.write(0);

  rinkyServo.attach(11);
  rinkyServo.write(0);
}
// Main acquire -> featurize -> classify -> actuate loop.
// NOTE(review): this listing is fragmentary -- the sampling for-loop
// header, the channel-1 accumulators, mu1..mu3, thumbAngle/indexAngle/
// middleAngle/rinkyAngle, act[], pred[], and the call that assigns
// `prediction` all fall outside this excerpt (presumably lost at the
// PDF page breaks marked below). Confirm against the original sketch.
void loop() {
// Instantaneous readings from the three EMG channels.
int data = analogRead(0);
int data2 = analogRead(1);
int data3 = analogRead(2);
int d = data+data2+data3;
double MAV1 = 0;
double MAV2 = 0;
double MAV3 = 0;
int prediction = 0;
// Raw sample windows, one per channel.
int emgData1[NUM_SAMPLES];
int emgData2[NUM_SAMPLES];
int emgData3[NUM_SAMPLES];
// (stray "81" below is a page number from the PDF extraction)
81
double sumMAV2 = 0;
double sumRMS2 = 0;
double sumMAV3 = 0;
double sumRMS3 = 0;
// NOTE(review): these accumulations are the body of a for-loop over `i`
// whose header is missing from this excerpt.
sumMAV2 += abs(emgData2[i]);
sumRMS2 += emgData2[i] * emgData2[i];
sumMAV3 += abs(emgData3[i]);
sumRMS3 += emgData3[i] * emgData3[i];
}
// Per-channel feature vectors: MAV, variance, skewness, kurtosis.
double a1 = MAV1;
double a2 = variance(emgData1, n, mu1);
double a3 = skewness(emgData1, n, mu1, a2);
double a4 = kurtosis(emgData1, n, mu1, a2);
double b1 = MAV2;
double b2 = variance(emgData2, n, mu2);
double b3 = skewness(emgData2, n, mu2, b2);
double b4 = kurtosis(emgData2, n, mu2, b2);
double c1 = MAV3;
double c2 = variance(emgData3, n, mu3);
double c3 = skewness(emgData3, n, mu3, c2);
double c4 = kurtosis(emgData3, n, mu3, c2);
// (stray "82" below is a page number from the PDF extraction)
82
// One on-device SVM prediction per channel.
int prediction1 = predict(a1, a2, a3, a4);
int prediction2 = predict(b1, b2, b3,b4);
int prediction3 = predict(c1, c2, c3, c4);
String a = act[prediction1];
String b = act[prediction2];
String c = act[prediction3];
/*Serial.print("Prediction1: " );
Serial.println(a);
Serial.print("Prediction2: " );
Serial.println(b);
Serial.print("Prediction3: " );
Serial.println(c);*/
// Drive the servos according to the fused prediction.
// NOTE(review): `prediction` is still 0 here -- the vote(...) call that
// should set it is not present in this excerpt.
if (prediction == 1) {
thumbServo.write(thumbAngle);
indexServo.write(indexAngle);
middleServo.write(middleAngle);
rinkyServo.write(rinkyAngle);
} else if (prediction == 0) {
thumbServo.write(20);
indexServo.write(20);
middleServo.write(20);
rinkyServo.write(20);
// NOTE(review): the branches below compare the int `prediction` with
// string literals -- an int-vs-pointer comparison that can never match
// as intended (and is ill-formed in standard C++). Likely left over
// from an earlier String-typed version of `prediction`; they should be
// removed or the comparisons rewritten against the String predictions.
} else if (prediction == "thumb_flex") {
thumbServo.write(thumbAngle);
} else if (prediction == "thumb_ext") {
thumbServo.write(0);
} else if (prediction == "index_flex") {
indexServo.write(indexAngle);
// (stray "83" below is a page number from the PDF extraction)
83
} else if (prediction == "index_ext") {
indexServo.write(0);
} else if (prediction == "middle_flex") {
middleServo.write(middleAngle);
} else if (prediction == "middle_ext") {
middleServo.write(0);
} else if (prediction == "rinky_flex") {
rinkyServo.write(rinkyAngle);
} else if (prediction == "rinky_ext") {
rinkyServo.write(0);
}
delay(2000);
//Serial.println(analogRead(0));
//Serial.print(data);Serial.print(",");Serial.print(data2);Serial.print(",");Serial.print(data3);
Serial.println();
Serial.print(pred[prediction]);}
// NOTE(review): everything below is commented-out debug output; the
// line pair starting "//Serial.print(data);...  ,0,1000," wrapped in
// extraction, leaving a stray non-comment continuation line.
//Serial.print(data);Serial.print(",");Serial.print(data2);Serial.print(" ,0,1000,
");Serial.print(data3);Serial.println();}
//Serial.print(data);
//Serial.print(",");Serial.print(data2);Serial.print(",");Serial.print(data3);Serial.println();}
//Serial.print(" ,0,1000, ");
//Serial.println(data2);
//Serial.println();
/*Serial.print(", ");
Serial.print(data3);
Serial.println();*/
//else{Serial.println(0);}
//Serial.print(data2);
//Serial.print(" ,0,4000, ");
//Serial.print(data);
//Serial.println();
84
Appendix D (CAD Drawings)
85
86
87
88
89
90
91