In this notebook, we will be classifying emotion based on facial expression. The datasets we will be using are FER-2013 and the extended CK+ (Cohn-Kanade) dataset. Our workflow is:

1. Inspect and explore data
2. Select and engineer features
3. Build and train model
4. Evaluate model
# Import libraries.
# Use inline so our visualizations display in notebook.
%matplotlib inline
import matplotlib.pyplot as plt # Data visualization.
import numpy as np # Data wrangling.
import os # Manipulate operating system interfaces.
import pandas as pd # Data handling.
import pickle # Python object serialization.
import plotly.express as px # Data visualization.
import plotly.graph_objects as go # Data visualization.
import seaborn as sns # Data visualization.
sns.set()
import warnings # Ignore all warnings.
warnings.filterwarnings("ignore")
from imblearn.over_sampling import RandomOverSampler # Perform random over-sampling.
from tensorflow.keras.utils import to_categorical # Convert a class vector (integers) to a binary class matrix.
from sklearn.preprocessing import StandardScaler # To perform standardization by centering and scaling.
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix, f1_score, multilabel_confusion_matrix, precision_score, recall_score
from sklearn.model_selection import train_test_split # To split data in training/validating/testing.
from statistics import mode # Find the most likely predicted emotion.
from tensorflow.keras import models # Group layers into an object with training and inference features.
from tensorflow.keras import layers # Keras layers API.
from tensorflow.keras import Input # Keras Input API. Instantiate a Keras tensor.
from tensorflow.keras.callbacks import EarlyStopping # Stop training when a monitored metric has stopped improving.
from tensorflow.keras.callbacks import ReduceLROnPlateau # Reduce learning rate when a metric has stopped improving.
from tensorflow.keras.optimizers import Adam # Adam optimizer.
from tensorflow.keras.models import load_model # To load the model.
from tensorflow.keras.utils import plot_model # Visualize the model and save it.
FER_2013_PATH = "../input/fer2013/fer2013.csv"
CK_PATH = "../input/ckdataset/ckextended.csv"
fer_2013_df = pd.read_csv(FER_2013_PATH)
# Sanity check.
print("fer_2013_df.shape =", fer_2013_df.shape, "\n")
print("Unique emotions =", sorted(fer_2013_df["emotion"].unique()), "\n")
print("# of Unique emotions =", len(fer_2013_df["emotion"].unique()), "\n")
print(fer_2013_df.emotion.value_counts(), "\n")
print("Unique Usage =", sorted(fer_2013_df["Usage"].unique()), "\n")
print("# of Unique Usage =", len(fer_2013_df["Usage"].unique()), "\n")
print(fer_2013_df.Usage.value_counts(), "\n")
print("0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral\n")
fer_2013_df
fer_2013_df.shape = (35887, 3)

Unique emotions = [0, 1, 2, 3, 4, 5, 6]

# of Unique emotions = 7

3    8989
6    6198
4    6077
2    5121
0    4953
5    4002
1     547
Name: emotion, dtype: int64

Unique Usage = ['PrivateTest', 'PublicTest', 'Training']

# of Unique Usage = 3

Training       28709
PublicTest      3589
PrivateTest     3589
Name: Usage, dtype: int64

0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral
 | emotion | pixels | Usage |
---|---|---|---|
0 | 0 | 70 80 82 72 58 58 60 63 54 58 60 48 89 115 121... | Training |
1 | 0 | 151 150 147 155 148 133 111 140 170 174 182 15... | Training |
2 | 2 | 231 212 156 164 174 138 161 173 182 200 106 38... | Training |
3 | 4 | 24 32 36 30 32 23 19 20 30 41 21 22 32 34 21 1... | Training |
4 | 6 | 4 0 0 0 0 0 0 0 0 0 0 0 3 15 23 28 48 50 58 84... | Training |
... | ... | ... | ... |
35882 | 6 | 50 36 17 22 23 29 33 39 34 37 37 37 39 43 48 5... | PrivateTest |
35883 | 3 | 178 174 172 173 181 188 191 194 196 199 200 20... | PrivateTest |
35884 | 0 | 17 17 16 23 28 22 19 17 25 26 20 24 31 19 27 9... | PrivateTest |
35885 | 3 | 30 28 28 29 31 30 42 68 79 81 77 67 67 71 63 6... | PrivateTest |
35886 | 2 | 19 13 14 12 13 16 21 33 50 57 71 84 97 108 122... | PrivateTest |
35887 rows × 3 columns
# Create a mapping dictionary to match the integer values to a string of emotion.
emotions_classes_dict = {0: "Angry",
                         1: "Disgust",
                         2: "Fear",
                         3: "Happy",
                         4: "Sad",
                         5: "Surprise",
                         6: "Neutral"}
# Map the integer values to their corresponding emotion labels.
fer_2013_df["emotion_label"] = fer_2013_df["emotion"].map(emotions_classes_dict)
fer_2013_df
 | emotion | pixels | Usage | emotion_label |
---|---|---|---|---|
0 | 0 | 70 80 82 72 58 58 60 63 54 58 60 48 89 115 121... | Training | Angry |
1 | 0 | 151 150 147 155 148 133 111 140 170 174 182 15... | Training | Angry |
2 | 2 | 231 212 156 164 174 138 161 173 182 200 106 38... | Training | Fear |
3 | 4 | 24 32 36 30 32 23 19 20 30 41 21 22 32 34 21 1... | Training | Sad |
4 | 6 | 4 0 0 0 0 0 0 0 0 0 0 0 3 15 23 28 48 50 58 84... | Training | Neutral |
... | ... | ... | ... | ... |
35882 | 6 | 50 36 17 22 23 29 33 39 34 37 37 37 39 43 48 5... | PrivateTest | Neutral |
35883 | 3 | 178 174 172 173 181 188 191 194 196 199 200 20... | PrivateTest | Happy |
35884 | 0 | 17 17 16 23 28 22 19 17 25 26 20 24 31 19 27 9... | PrivateTest | Angry |
35885 | 3 | 30 28 28 29 31 30 42 68 79 81 77 67 67 71 63 6... | PrivateTest | Happy |
35886 | 2 | 19 13 14 12 13 16 21 33 50 57 71 84 97 108 122... | PrivateTest | Fear |
35887 rows × 4 columns
# Visualize one sample image for each emotion.
def string_to_array(pixel_string, width, height):
    # Parse the space-separated pixel string into a 2-D grayscale array.
    pixel_values = [int(x) for x in pixel_string.split()]
    return np.array(pixel_values).reshape(height, width)

def display_images(df):
    # Show one image per emotion in a single row of subplots.
    emotions = df["emotion"].unique()
    fig, axes = plt.subplots(1, len(emotions), figsize=(len(emotions) * 3, 3))
    for emotion, ax in zip(emotions, axes):
        emotion_df = df[df["emotion"] == emotion]
        pixel_string = emotion_df.iloc[0]["pixels"]
        image_array = string_to_array(pixel_string, 48, 48)
        ax.imshow(image_array, cmap="gray")
        ax.set_title(emotion)
        ax.axis("off")
    plt.show()
tmp_data = {
    "emotion": ["Angry", "Disgust", "Fear", "Happy", "Sad", "Surprise", "Neutral"],
    "pixels": [
        fer_2013_df["pixels"][0],
        fer_2013_df["pixels"][299],
        fer_2013_df["pixels"][2],
        fer_2013_df["pixels"][7],
        fer_2013_df["pixels"][3],
        fer_2013_df["pixels"][15],
        fer_2013_df["pixels"][4],
    ],
}
tmp_df = pd.DataFrame(tmp_data)
display_images(tmp_df)
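Parsing the pixel strings with a Python-level list comprehension is fine for a handful of previews, but it gets slow over the full 35k+ rows. A vectorized drop-in sketch (the uint8 dtype is an assumption based on the 0-255 grayscale range):

# Faster variant of string_to_array: np.fromiter skips the intermediate Python list.
# uint8 assumes 8-bit grayscale pixels (0-255).
def string_to_array_fast(pixel_string, width=48, height=48):
    pixels = np.fromiter((int(x) for x in pixel_string.split()),
                         dtype=np.uint8, count=width * height)
    return pixels.reshape(height, width)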
ck_df = pd.read_csv(CK_PATH)
# Remove the Contempt emotion (label 7), which has no counterpart in FER-2013.
ck_df = ck_df.loc[ck_df["emotion"] != 7]
# Sanity check.
print("ck_df.shape =", ck_df.shape, "\n")
print("Unique emotions = ", sorted(ck_df["emotion"].unique()), "\n")
print("# of Unique emotions =", len(ck_df["emotion"].unique()), "\n")
print(ck_df.emotion.value_counts(), "\n")
print("Unique Usage =", sorted(ck_df["Usage"].unique()), "\n")
print("# of Unique Usage =", len(ck_df["Usage"].unique()), "\n")
print(ck_df.Usage.value_counts(), "\n")
print("0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral\n")
ck_df
ck_df.shape = (902, 3)

Unique emotions = [0, 1, 2, 3, 4, 5, 6]

# of Unique emotions = 7

6    593
5     83
3     69
1     59
0     45
4     28
2     25
Name: emotion, dtype: int64

Unique Usage = ['PrivateTest', 'PublicTest', 'Training']

# of Unique Usage = 3

Training       720
PrivateTest     93
PublicTest      89
Name: Usage, dtype: int64

0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral
 | emotion | pixels | Usage |
---|---|---|---|
0 | 6 | 36 39 35 25 19 11 8 7 3 13 15 9 21 57 75 90 10... | Training |
1 | 6 | 88 74 19 4 5 5 3 12 8 21 15 21 15 18 24 29 32 ... | Training |
2 | 6 | 9 2 4 7 1 1 1 0 7 29 49 76 115 141 156 169 177... | Training |
3 | 6 | 104 106 108 104 95 50 60 61 58 83 126 133 139 ... | Training |
4 | 6 | 68 72 67 67 6 2 1 1 1 1 1 14 24 24 38 65 79 94... | Training |
... | ... | ... | ... |
915 | 5 | 87 86 88 92 92 127 231 248 251 253 254 254 254... | PrivateTest |
916 | 5 | 21 24 26 28 27 28 30 8 0 0 0 0 0 0 1 4 37 42 4... | PrivateTest |
917 | 5 | 76 40 31 38 28 34 38 36 41 36 46 38 44 26 45 5... | PrivateTest |
918 | 5 | 114 87 16 29 17 25 30 34 37 35 45 93 63 80 73 ... | PrivateTest |
919 | 5 | 101 102 99 96 98 42 23 18 15 17 27 34 17 24 29... | PrivateTest |
902 rows × 3 columns
# Map the integer values to their corresponding emotion labels using the emotions_classes_dict mapping dictionary.
ck_df["emotion_label"] = ck_df["emotion"].map(emotions_classes_dict)
ck_df
 | emotion | pixels | Usage | emotion_label |
---|---|---|---|---|
0 | 6 | 36 39 35 25 19 11 8 7 3 13 15 9 21 57 75 90 10... | Training | Neutral |
1 | 6 | 88 74 19 4 5 5 3 12 8 21 15 21 15 18 24 29 32 ... | Training | Neutral |
2 | 6 | 9 2 4 7 1 1 1 0 7 29 49 76 115 141 156 169 177... | Training | Neutral |
3 | 6 | 104 106 108 104 95 50 60 61 58 83 126 133 139 ... | Training | Neutral |
4 | 6 | 68 72 67 67 6 2 1 1 1 1 1 14 24 24 38 65 79 94... | Training | Neutral |
... | ... | ... | ... | ... |
915 | 5 | 87 86 88 92 92 127 231 248 251 253 254 254 254... | PrivateTest | Surprise |
916 | 5 | 21 24 26 28 27 28 30 8 0 0 0 0 0 0 1 4 37 42 4... | PrivateTest | Surprise |
917 | 5 | 76 40 31 38 28 34 38 36 41 36 46 38 44 26 45 5... | PrivateTest | Surprise |
918 | 5 | 114 87 16 29 17 25 30 34 37 35 45 93 63 80 73 ... | PrivateTest | Surprise |
919 | 5 | 101 102 99 96 98 42 23 18 15 17 27 34 17 24 29... | PrivateTest | Surprise |
902 rows × 4 columns
# Visualize one sample image for each emotion.
tmp_data = {
    "emotion": ["Angry", "Disgust", "Fear", "Happy", "Sad", "Surprise", "Neutral"],
    "pixels": [
        ck_df["pixels"][680],
        ck_df["pixels"][725],
        ck_df["pixels"][812],
        ck_df["pixels"][593],
        ck_df["pixels"][784],
        ck_df["pixels"][837],
        ck_df["pixels"][0],
    ],
}
tmp_df = pd.DataFrame(tmp_data)
display_images(tmp_df)
combined_df = pd.concat([fer_2013_df, ck_df], axis=0)
# Sanity check.
print("combined_df.shape =", combined_df.shape, "\n")
combined_df.info()
combined_df.shape = (36789, 4)

<class 'pandas.core.frame.DataFrame'>
Int64Index: 36789 entries, 0 to 919
Data columns (total 4 columns):
 #   Column         Non-Null Count  Dtype 
---  ------         --------------  ----- 
 0   emotion        36789 non-null  int64 
 1   pixels         36789 non-null  object
 2   Usage          36789 non-null  object
 3   emotion_label  36789 non-null  object
dtypes: int64(1), object(3)
memory usage: 1.4+ MB
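Note that pd.concat preserved each source's row labels, so index values 0 to 919 now appear twice (the Int64Index line above shows 36789 entries indexed 0 to 919). If label-based lookups such as .loc[0] matter later, a unique index is a cheap safeguard; a sketch using a hypothetical combined_df_reindexed name so the previews below keep their original labels:

# Optional: build a copy with a unique 0..N-1 index.
combined_df_reindexed = combined_df.reset_index(drop=True)
print(combined_df_reindexed.index.is_unique)  # True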
print("Unique emotions =", sorted(combined_df["emotion"].unique()), "\n")
print("# of Unique emotions =", len(combined_df["emotion"].unique()), "\n")
print(combined_df.emotion.value_counts(), "\n")
print("Unique emotions =", sorted(combined_df["emotion_label"].unique()), "\n")
print("# of Unique emotions =", len(combined_df["emotion_label"].unique()), "\n")
print(combined_df.emotion_label.value_counts(), "\n")
print("Unique Usage =", sorted(combined_df["Usage"].unique()), "\n")
print("# of Unique Usage =", len(fer_2013_df["Usage"].unique()), "\n")
print(fer_2013_df.Usage.value_counts(), "\n")
print("0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral\n")
combined_df
Unique emotions = [0, 1, 2, 3, 4, 5, 6]

# of Unique emotions = 7

3    9058
6    6791
4    6105
2    5146
0    4998
5    4085
1     606
Name: emotion, dtype: int64

Unique emotion labels = ['Angry', 'Disgust', 'Fear', 'Happy', 'Neutral', 'Sad', 'Surprise']

# of Unique emotion labels = 7

Happy       9058
Neutral     6791
Sad         6105
Fear        5146
Angry       4998
Surprise    4085
Disgust      606
Name: emotion_label, dtype: int64

Unique Usage = ['PrivateTest', 'PublicTest', 'Training']

# of Unique Usage = 3

Training       29429
PrivateTest     3682
PublicTest      3678
Name: Usage, dtype: int64

0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral
 | emotion | pixels | Usage | emotion_label |
---|---|---|---|---|
0 | 0 | 70 80 82 72 58 58 60 63 54 58 60 48 89 115 121... | Training | Angry |
1 | 0 | 151 150 147 155 148 133 111 140 170 174 182 15... | Training | Angry |
2 | 2 | 231 212 156 164 174 138 161 173 182 200 106 38... | Training | Fear |
3 | 4 | 24 32 36 30 32 23 19 20 30 41 21 22 32 34 21 1... | Training | Sad |
4 | 6 | 4 0 0 0 0 0 0 0 0 0 0 0 3 15 23 28 48 50 58 84... | Training | Neutral |
... | ... | ... | ... | ... |
915 | 5 | 87 86 88 92 92 127 231 248 251 253 254 254 254... | PrivateTest | Surprise |
916 | 5 | 21 24 26 28 27 28 30 8 0 0 0 0 0 0 1 4 37 42 4... | PrivateTest | Surprise |
917 | 5 | 76 40 31 38 28 34 38 36 41 36 46 38 44 26 45 5... | PrivateTest | Surprise |
918 | 5 | 114 87 16 29 17 25 30 34 37 35 45 93 63 80 73 ... | PrivateTest | Surprise |
919 | 5 | 101 102 99 96 98 42 23 18 15 17 27 34 17 24 29... | PrivateTest | Surprise |
36789 rows × 4 columns
print(combined_df.isnull().sum(), "\n")
print("combined_df.isnull().sum().sum() =", combined_df.isnull().sum().sum())
emotion          0
pixels           0
Usage            0
emotion_label    0
dtype: int64

combined_df.isnull().sum().sum() = 0
# Sanity check.
print("combined_df.shape =", combined_df.shape, "\n")
print("combined_df.duplicated().sum() =", combined_df.duplicated().sum())
print("Removing duplicate rows from combined_df...\n")
combined_df = combined_df.drop_duplicates()
# Sanity check.
print("combined_df.shape =", combined_df.shape)
combined_df.shape = (36789, 4)

combined_df.duplicated().sum() = 1234
Removing duplicate rows from combined_df...

combined_df.shape = (35555, 4)
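drop_duplicates() only removes rows that match in all four columns, so the same pixel string can survive with a different label or Usage split. A quick check for such conflicts, as a sketch:

# Rows whose pixel string also appears in at least one other row.
conflicting = combined_df.duplicated(subset=["pixels"], keep=False).sum()
print("rows sharing a pixel string with another row:", conflicting)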
# List the distribution of each emotion used for Training, Private Test, and Public Test.
emotion_counts_by_usage = combined_df.groupby(["Usage", "emotion"]).size().reset_index(name="Count")
# Map the integer values to their corresponding emotion labels.
emotion_counts_by_usage["emotion_label"] = emotion_counts_by_usage["emotion"].map(emotions_classes_dict)
emotion_counts_by_usage
 | Usage | emotion | Count | emotion_label |
---|---|---|---|---|
0 | PrivateTest | 0 | 492 | Angry |
1 | PrivateTest | 1 | 61 | Disgust |
2 | PrivateTest | 2 | 528 | Fear |
3 | PrivateTest | 3 | 885 | Happy |
4 | PrivateTest | 4 | 595 | Sad |
5 | PrivateTest | 5 | 420 | Surprise |
6 | PrivateTest | 6 | 686 | Neutral |
7 | PublicTest | 0 | 468 | Angry |
8 | PublicTest | 1 | 61 | Disgust |
9 | PublicTest | 2 | 493 | Fear |
10 | PublicTest | 3 | 901 | Happy |
11 | PublicTest | 4 | 655 | Sad |
12 | PublicTest | 5 | 411 | Surprise |
13 | PublicTest | 6 | 664 | Neutral |
14 | Training | 0 | 3885 | Angry |
15 | Training | 1 | 428 | Disgust |
16 | Training | 2 | 3929 | Fear |
17 | Training | 3 | 7142 | Happy |
18 | Training | 4 | 4755 | Sad |
19 | Training | 5 | 2748 | Surprise |
20 | Training | 6 | 5348 | Neutral |
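The long-format table above feeds the plots below; for an at-a-glance emotion-by-Usage matrix, the same data can be pivoted. A sketch:

# Wide view: one row per emotion, one column per Usage split.
counts_wide = emotion_counts_by_usage.pivot(index="emotion_label", columns="Usage", values="Count")
print(counts_wide)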
# Create a grouped bar plot with the emotions on the x-axis and Usage as the hue.
g = sns.catplot(data=emotion_counts_by_usage, x="emotion", y="Count", hue="Usage", kind="bar", palette="muted", height=6, aspect=2)
# Set the x-axis labels to emotion labels using the emotions_classes_dict.
g.set_xticklabels([emotions_classes_dict[int(t.get_text())] for t in g.ax.get_xticklabels()])
# Set axis labels and title.
g.set_axis_labels("Emotion", "Count")
g.ax.set_title("Emotion Counts by Usage Type")
plt.show()
# Create one bar trace per Usage split.
data = []
for usage in emotion_counts_by_usage["Usage"].unique():
    df_usage = emotion_counts_by_usage[emotion_counts_by_usage["Usage"] == usage]
    trace = go.Bar(x=[emotions_classes_dict[emotion] for emotion in df_usage["emotion"]], y=df_usage["Count"], name=usage)
    data.append(trace)
# Create layout.
layout = go.Layout(title="Emotion Counts by Usage Type",
                   xaxis=dict(title="Emotion"),
                   yaxis=dict(title="Count"),
                   legend=dict(x=1.0, y=1.0),
                   barmode="group")
# Create a figure with all prepared data for the plot.
fig = go.Figure(data=data, layout=layout)
fig.show()
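For reference, plotly express can draw the same grouped bar chart from the long-format frame in one call; a sketch:

# Equivalent grouped bar chart via plotly express.
fig = px.bar(emotion_counts_by_usage, x="emotion_label", y="Count",
             color="Usage", barmode="group", title="Emotion Counts by Usage Type")
fig.show()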
# Create a grouped bar plot with Usage on the x-axis and emotion as the hue.
g = sns.catplot(data=emotion_counts_by_usage, x="Usage", y="Count", hue="emotion", kind="bar", palette="muted", height=6, aspect=2)
# Set the hue labels to emotion labels using the emotions_classes_dict.
g.legend.set_title("Emotion")
for t, l in zip(g.legend.texts, emotions_classes_dict.values()):
    t.set_text(l)
# Set axis labels and title.
g.set_axis_labels("Usage", "Count")
g.ax.set_title("Emotion Counts by Usage Type")
plt.show()
# Create one bar trace per emotion.
data = []
for emotion in emotions_classes_dict.keys():
    df_emotion = emotion_counts_by_usage[emotion_counts_by_usage["emotion"] == emotion]
    trace = go.Bar(x=df_emotion["Usage"], y=df_emotion["Count"], name=emotions_classes_dict[emotion])
    data.append(trace)
# Create layout.
layout = go.Layout(title="Emotion Counts by Usage Type",
                   xaxis=dict(title="Usage"),
                   yaxis=dict(title="Count"),
                   legend=dict(x=1.0, y=1.0),
                   barmode="group")
# Create a figure with all prepared data for the plot.
fig = go.Figure(data=data, layout=layout)
fig.show()
combined_df.emotion.value_counts(), combined_df.emotion.value_counts(normalize=True)
(3    8928
 6    6698
 4    6005
 2    4950
 0    4845
 5    3579
 1     550
 Name: emotion, dtype: int64,
 3    0.251104
 6    0.188384
 4    0.168893
 2    0.139221
 0    0.136268
 5    0.100661
 1    0.015469
 Name: emotion, dtype: float64)
combined_df.emotion_label.value_counts(), combined_df.emotion_label.value_counts(normalize=True)
(Happy       8928
 Neutral     6698
 Sad         6005
 Fear        4950
 Angry       4845
 Surprise    3579
 Disgust      550
 Name: emotion_label, dtype: int64,
 Happy       0.251104
 Neutral     0.188384
 Sad         0.168893
 Fear        0.139221
 Angry       0.136268
 Surprise    0.100661
 Disgust     0.015469
 Name: emotion_label, dtype: float64)
emotions_classes = sorted(combined_df["emotion"].unique())
emotions_classes_label = sorted(combined_df["emotion_label"].unique())
# Note: the labels sort alphabetically, so their order does not match the integer
# class order (Neutral is class 6 but sorts before Sad and Surprise);
# emotions_classes_dict remains the source of truth for the mapping.
print(emotions_classes, "\n")
print(emotions_classes_label, "\n")
print("0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral")
[0, 1, 2, 3, 4, 5, 6]

['Angry', 'Disgust', 'Fear', 'Happy', 'Neutral', 'Sad', 'Surprise']

0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral
plt.figure(figsize = (13,10))
plt.title("Emotion Classes Count")
ax = sns.countplot(data = combined_df, x = "emotion_label", order = combined_df["emotion_label"].value_counts().index, palette = ["#636efa", "#ef553b", "#00cc96", "#ab63fa", "#ffa15a", "#19d3f3", "#ff6692"])
ax.bar_label(ax.containers[0])
plt.show()
fig = px.bar(x = combined_df["emotion_label"].value_counts().index,
             y = combined_df["emotion_label"].value_counts().values,
             color = combined_df["emotion_label"].value_counts().index,
             labels = {"x": "emotion", "y": "count"},
             title = "Emotion Classes Count")
fig.show()
combined_df["emotion_label"].value_counts()
labels = combined_df["emotion_label"].value_counts().index
values = combined_df["emotion_label"].value_counts().values
colors=["#636efa", "#ef553b", "#00cc96", "#ab63fa", "#ffa15a", "#19d3f3", "#ff6692"]
fig = go.Figure(data=[go.Pie(labels = labels, values = values, textinfo = "label+percent",
                             insidetextorientation = "radial", marker = dict(colors = colors))])
fig.update_layout(title="Emotion Classes Counts")
fig.show()
X = combined_df["pixels"]
X
0      70 80 82 72 58 58 60 63 54 58 60 48 89 115 121...
1      151 150 147 155 148 133 111 140 170 174 182 15...
2      231 212 156 164 174 138 161 173 182 200 106 38...
3      24 32 36 30 32 23 19 20 30 41 21 22 32 34 21 1...
4      4 0 0 0 0 0 0 0 0 0 0 0 3 15 23 28 48 50 58 84...
                             ...                        
915    87 86 88 92 92 127 231 248 251 253 254 254 254...
916    21 24 26 28 27 28 30 8 0 0 0 0 0 0 1 4 37 42 4...
917    76 40 31 38 28 34 38 36 41 36 46 38 44 26 45 5...
918    114 87 16 29 17 25 30 34 37 35 45 93 63 80 73 ...
919    101 102 99 96 98 42 23 18 15 17 27 34 17 24 29...
Name: pixels, Length: 35555, dtype: object
y = combined_df["emotion"]
y
0      0
1      0
2      2
3      4
4      6
      ..
915    5
916    5
917    5
918    5
919    5
Name: emotion, Length: 35555, dtype: int64
# Balance the dataset by randomly oversampling the minority classes (e.g., Disgust).
# With sampling_strategy="auto", every class is resampled up to the size of the majority class.
oversampler = RandomOverSampler(sampling_strategy="auto")
X, y = oversampler.fit_resample(X.values.reshape(-1, 1), y)
X
array([['70 80 82 72 58 58 60 63 54 58 60 48 89 115 121...'],
       ['151 150 147 155 148 133 111 140 170 174 182 15...'],
       ['231 212 156 164 174 138 161 173 182 200 106 38...'],
       ...,
       ['169 162 172 183 183 180 156 119 94 195 249 143...'],
       ['93 93 93 93 94 93 80 94 138 132 127 119 117 12...']],
      dtype=object)
[output truncated: each entry is one full 48x48 pixel string]
93 84 75 67 59 60 64 61 59 62 71 79 77 115 218 209 202'], ['18 19 19 17 30 50 48 47 47 49 52 52 48 49 52 49 47 50 53 53 57 54 49 48 63 81 109 129 139 141 134 131 127 125 130 133 118 102 79 46 43 39 11 6 7 5 5 5 20 20 17 30 52 50 50 48 44 53 49 46 44 48 51 51 52 53 57 55 49 56 90 132 159 166 166 164 158 155 140 143 142 143 148 144 137 121 121 82 35 38 18 4 7 6 5 5 19 17 30 47 47 45 50 43 44 54 48 47 47 49 51 52 57 58 54 46 80 152 188 192 180 174 169 164 162 157 150 139 145 152 157 150 145 129 126 105 44 33 26 4 8 6 5 5 17 30 49 48 45 44 45 46 49 50 48 48 50 49 50 59 58 48 58 112 158 171 173 179 182 175 169 166 164 158 153 146 149 155 158 156 147 134 124 94 30 19 30 4 6 6 5 5 29 49 48 48 46 46 45 51 52 44 48 51 46 50 56 58 50 79 129 126 144 167 166 164 175 177 167 161 157 156 157 152 150 158 159 156 147 111 76 35 7 6 20 7 6 5 5 5 47 49 49 46 46 48 50 50 52 47 47 47 50 55 59 51 87 106 94 106 136 123 104 99 98 89 99 120 126 136 153 151 148 155 159 156 131 81 48 24 14 13 8 8 6 5 5 5 49 48 48 44 49 50 48 49 48 51 47 50 55 58 47 61 86 79 108 106 87 81 77 74 85 69 73 98 110 120 139 148 150 154 155 145 111 66 55 74 84 49 13 7 7 6 6 5 49 44 45 49 48 46 47 46 46 49 49 56 50 44 75 122 130 144 168 150 134 136 123 121 123 130 126 121 117 120 134 147 148 147 142 125 83 71 105 127 109 49 11 6 7 6 5 5 45 46 46 49 48 44 43 47 44 50 58 50 48 73 137 167 174 176 179 191 194 171 159 149 134 123 134 140 136 141 148 145 144 139 128 94 77 113 132 117 98 42 7 6 7 6 5 6 47 46 48 45 49 45 45 46 48 56 53 57 81 132 156 166 167 173 168 186 176 152 130 115 105 110 117 135 145 147 156 153 146 136 102 77 117 118 31 22 31 29 9 6 6 6 5 7 47 48 43 45 46 49 47 51 59 55 43 64 121 163 170 177 168 152 142 137 142 95 23 22 51 84 119 130 152 165 175 166 149 125 88 101 164 167 27 28 46 22 6 6 6 6 6 6 50 46 48 46 42 51 53 55 50 54 44 62 156 176 174 164 147 135 118 154 216 110 11 43 98 97 114 148 183 195 194 174 156 126 92 114 112 123 91 71 64 42 3 6 6 6 6 8 46 44 44 44 50 53 56 51 62 90 24 88 166 184 188 169 158 157 161 157 155 135 77 89 95 95 118 156 191 198 198 177 163 139 93 98 109 108 115 109 82 64 7 6 7 6 6 8 44 43 44 44 47 56 55 54 112 56 54 129 172 188 184 160 161 166 174 180 166 149 142 123 109 113 142 150 169 181 188 184 168 150 106 92 111 119 117 119 101 85 22 4 7 6 6 6 43 47 50 50 52 54 48 109 92 80 121 155 174 178 175 165 170 181 179 182 178 164 147 131 128 133 158 167 168 174 181 190 180 162 122 95 105 119 124 119 115 103 47 2 7 7 6 6 48 50 51 52 56 51 103 131 99 138 138 163 172 171 172 171 173 176 178 175 172 163 152 142 140 151 164 171 174 174 181 191 180 163 138 98 101 113 124 124 121 115 72 7 6 6 6 5 47 53 51 59 50 88 142 128 143 153 148 166 170 172 183 179 176 182 180 173 168 163 153 146 151 164 170 173 177 177 188 186 172 168 158 126 94 112 126 127 124 118 96 21 4 7 6 6 47 52 53 52 53 114 140 142 152 154 158 164 172 175 183 186 180 186 192 182 173 168 160 155 162 169 174 179 183 186 197 182 173 167 161 156 117 103 130 130 127 121 110 33 3 7 6 6 49 61 55 43 58 115 136 146 148 155 164 166 172 173 184 193 196 198 195 186 173 172 169 169 171 169 175 179 185 197 193 174 165 159 161 165 154 114 123 135 131 123 108 39 2 7 5 5 53 58 44 52 64 110 137 143 145 155 163 168 175 179 187 192 200 203 194 187 182 175 173 177 174 173 175 176 182 191 163 153 152 157 176 176 164 142 106 135 135 122 110 41 2 7 5 5 51 46 43 62 79 109 133 143 144 152 158 167 177 181 185 191 197 198 198 192 187 178 174 180 179 173 172 172 175 162 149 155 160 184 209 191 169 154 104 116 139 127 114 45 1 7 5 5 44 54 61 65 78 112 130 140 144 152 162 171 177 
180 185 195 194 195 201 198 191 182 183 182 178 173 171 170 165 152 163 176 202 208 194 183 168 143 100 114 136 131 119 45 2 7 5 5 58 98 69 74 69 102 127 136 144 152 161 171 178 182 183 192 194 198 199 202 198 188 186 187 181 174 168 173 166 159 175 196 151 115 137 153 153 124 125 124 141 129 117 38 2 8 5 5 127 115 71 85 82 98 128 137 143 149 158 169 175 179 183 187 193 199 200 202 200 196 191 189 183 176 171 177 167 154 178 148 69 138 144 123 118 118 127 127 142 128 111 27 3 7 5 6 153 124 75 73 91 95 130 139 142 146 140 166 178 179 181 187 190 196 194 197 200 196 191 189 188 179 175 179 174 152 147 140 171 174 137 146 151 104 107 125 134 124 99 15 5 6 6 6 171 135 89 67 76 79 122 139 146 150 148 165 176 181 183 185 188 195 187 189 198 198 196 193 191 181 178 183 183 176 168 177 185 179 153 162 177 125 112 119 125 120 77 5 7 6 5 5 181 138 101 82 74 79 111 142 148 153 160 164 173 180 181 187 189 186 181 190 199 199 197 194 191 186 184 187 188 190 190 187 183 182 165 148 183 145 109 110 117 104 51 2 8 6 5 5 118 98 112 93 87 91 99 133 148 153 161 164 174 178 181 185 185 180 189 195 196 199 198 195 195 192 188 189 190 191 185 186 185 182 172 139 156 155 116 94 107 88 25 3 7 6 5 6 76 82 107 101 95 99 110 122 145 156 160 168 174 179 180 182 185 185 189 192 194 197 200 199 198 195 192 193 190 192 185 187 178 178 176 157 143 155 124 93 99 71 10 7 7 6 6 6 98 87 100 107 104 108 119 132 148 158 163 169 174 178 180 181 183 187 190 192 196 197 197 197 199 197 199 193 189 185 185 189 190 178 145 141 146 122 89 84 97 52 4 8 7 6 6 6 88 96 100 111 113 116 122 134 146 157 164 171 174 179 180 184 184 186 189 190 194 198 199 201 199 198 197 182 177 179 180 187 160 132 125 113 102 73 68 91 114 34 2 8 7 7 6 6 72 102 102 107 112 115 122 134 144 155 163 170 174 181 182 182 184 181 184 187 192 197 198 196 195 195 185 172 159 168 155 127 113 114 101 97 96 72 77 137 117 17 4 8 6 7 6 6 50 106 110 107 110 112 121 131 140 152 159 164 173 178 178 182 183 181 182 185 188 190 193 192 190 188 177 133 116 124 123 128 137 133 124 115 121 111 76 123 92 3 7 7 7 7 6 6 39 102 116 114 111 109 114 129 136 146 154 158 167 172 176 180 181 182 183 179 180 185 191 192 188 186 157 138 160 154 162 171 174 166 144 126 118 121 81 102 54 1 8 7 6 7 6 6 30 100 117 119 115 111 111 122 130 139 151 159 160 168 172 174 176 180 182 180 183 187 189 190 188 181 160 182 187 184 182 175 171 166 146 133 121 107 77 81 17 5 7 7 7 7 6 6 18 91 120 123 122 117 112 112 120 135 143 153 157 163 168 170 176 179 180 181 181 184 186 186 187 179 179 188 188 189 183 180 173 163 148 129 113 96 82 50 3 7 7 7 6 7 6 6 13 82 120 128 131 126 121 115 113 122 137 143 150 157 162 167 171 175 177 178 180 183 186 186 182 177 186 187 189 188 181 172 165 156 133 112 116 103 89 24 5 8 7 7 7 7 6 6 11 66 117 131 137 136 127 122 115 114 125 137 143 148 154 161 162 169 174 178 178 180 181 180 180 180 181 182 183 183 180 175 162 142 122 124 120 109 82 11 8 8 7 6 6 7 5 5 12 58 117 132 140 145 141 133 127 117 118 125 135 142 148 153 158 162 168 172 173 174 177 182 181 181 181 181 177 176 175 168 155 133 128 130 118 122 67 3 8 7 7 6 6 6 6 6 14 55 115 132 143 148 149 145 137 128 121 116 123 130 137 143 151 156 162 166 168 175 179 181 181 183 186 182 177 176 172 163 154 148 154 144 137 125 47 2 8 6 7 7 6 6 6 5 21 56 110 132 144 152 156 154 147 139 130 122 122 125 130 132 140 149 154 161 167 173 176 181 183 181 186 185 182 183 179 176 174 169 164 152 141 114 26 4 8 7 7 7 6 6 6 5 121 72 103 129 145 156 159 159 157 151 141 130 126 125 130 130 129 134 143 152 161 164 169 173 179 184 183 184 
187 183 186 186 181 174 165 153 137 99 10 6 7 7 7 7 6 6 5 5 184 77 102 129 146 156 161 165 165 162 154 140 131 128 127 129 127 122 127 134 142 150 157 163 171 177 180 184 186 188 188 189 184 174 162 146 135 66 1 9 7 7 8 7 6 6 5 5 176 83 106 128 148 158 163 168 170 169 164 157 144 135 129 127 124 120 117 118 124 131 140 148 157 164 172 178 179 184 185 184 179 167 152 135 104 18 5 8 7 7 7 7 7 6 6 6 170 91 110 130 148 159 164 167 173 176 172 167 159 148 138 131 126 123 118 110 107 109 117 128 141 150 157 160 169 172 170 164 162 154 132 99 27 3 8 7 7 7 7 6 7 7 6 6 188 105 114 131 148 160 167 171 178 180 179 177 173 165 159 148 140 136 130 124 115 105 100 101 107 120 136 144 149 153 152 146 130 102 61 14 2 8 6 8 8 7 6 7 7 6 6 6 231 134 117 133 150 161 168 170 177 183 182 182 180 179 174 168 161 156 152 150 147 144 142 131 116 99 90 90 92 92 86 76 71 26 0 1 6 7 7 8 7 7 6 7 7 6 6 6 221 196 123 137 152 160 167 175 177 180 185 190 188 184 179 179 174 167 166 163 163 159 157 156 150 138 115 89 76 80 68 90 214 118 34 34 21 10 7 10 14 6 6 7 7 6 6 6']], dtype=object)
y
0 0 1 0 2 2 3 4 4 6 .. 62491 6 62492 6 62493 6 62494 6 62495 6 Name: emotion, Length: 62496, dtype: int64
print("X.shape =", X.shape)
print("y.shape =", y.shape)
X.shape = (62496, 1) y.shape = (62496,)
print("y.value_counts() =", y.value_counts())
y.value_counts() = 0 8928 2 8928 4 8928 6 8928 3 8928 5 8928 1 8928 Name: emotion, dtype: int64
# Flatten the NumPy array X into a one-dimensional array, then convert it to a pandas Series.
X = pd.Series(X.flatten())
X
0 70 80 82 72 58 58 60 63 54 58 60 48 89 115 121... 1 151 150 147 155 148 133 111 140 170 174 182 15... 2 231 212 156 164 174 138 161 173 182 200 106 38... 3 24 32 36 30 32 23 19 20 30 41 21 22 32 34 21 1... 4 4 0 0 0 0 0 0 0 0 0 0 0 3 15 23 28 48 50 58 84... ... 62491 38 38 36 42 53 131 142 174 164 172 208 186 170... 62492 139 140 142 142 143 143 143 144 144 144 145 14... 62493 169 162 172 183 183 180 156 119 94 195 249 143... 62494 93 93 93 93 94 93 80 94 138 132 127 119 117 12... 62495 18 19 19 17 30 50 48 47 47 49 52 52 48 49 52 4... Length: 62496, dtype: object
# Convert the array X of pixel strings to an array of floats, then scale the values to the range [0, 1].
X = np.array(list(map(str.split, X)), np.float32)
# Pixel values range from 0 to 255 (256 possible values).
# Normalize the data to the range [0, 1].
X /= 255
# Print the first 10 elements of X.
X[:10]
array([[0.27450982, 0.3137255 , 0.32156864, ..., 0.41568628, 0.42745098, 0.32156864], [0.5921569 , 0.5882353 , 0.5764706 , ..., 0.75686276, 0.7176471 , 0.72156864], [0.90588236, 0.83137256, 0.6117647 , ..., 0.34509805, 0.43137255, 0.59607846], ..., [0.3019608 , 0.30588236, 0.30980393, ..., 0.49019608, 0.2627451 , 0.26666668], [0.33333334, 0.32941177, 0.3529412 , ..., 0.22745098, 0.28627452, 0.32941177], [1. , 0.99607843, 1. , ..., 0.99607843, 1. , 1. ]], dtype=float32)
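To make the parsing step above concrete, here is a minimal sketch (using a hypothetical three-pixel string, not part of the dataset) of how map(str.split, ...) turns each space-separated pixel string into a row of floats:
toy = pd.Series(["0 128 255"])  # Hypothetical pixel string with 3 values.
parsed = np.array(list(map(str.split, toy)), np.float32)
print(parsed)        # [[  0. 128. 255.]]
print(parsed / 255)  # [[0.        0.5019608 1.       ]]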
X = X.reshape(-1, 48, 48, 1)
X.shape
(62496, 48, 48, 1)
y = np.array(y)
y = y.reshape(y.shape[0], 1)
y.shape
(62496, 1)
# Hold out 20% of the data, then split that remainder 50/50 into validation and test sets (80/10/10 overall).
X_train, X_remain, y_train, y_remain = train_test_split(
    X, y, test_size = 0.2, random_state = 42)
X_valid, X_test, y_valid, y_test = train_test_split(
    X_remain, y_remain, test_size = 0.5, random_state = 42)
# A 4D array is an array of 3D arrays:
# Specifying one index gives you a single image of shape (48, 48, 1).
# Specifying two indices gives you a matrix: one image row of shape (48, 1).
# Specifying three indices gives you an array: one pixel's channel values, shape (1,).
# Specifying four indices gives you a single element (one pixel value).
# Shape convention: (num_samples, height, width, channels).
print("X_train.shape = ", X_train.shape) # 4 Dimensions.
print("X_valid.shape = ", X_valid.shape) # 4 Dimensions.
print("X_test.shape = ", X_test.shape) # 4 Dimensions.
print()
# A 2D-array is a matrix (an array of arrays):
# Specifying one index gives you an array.
# Specifying two indices gives you a single element.
print("y_train.shape = ", y_train.shape) # 2 Dimensions.
print("y_valid.shape = ", y_valid.shape) # 2 Dimensions.
print("y_test.shape = ", y_test.shape) # 2 Dimensions.
X_train.shape = (49996, 48, 48, 1) X_valid.shape = (6250, 48, 48, 1) X_test.shape = (6250, 48, 48, 1) y_train.shape = (49996, 1) y_valid.shape = (6250, 1) y_test.shape = (6250, 1)
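As a quick sanity check of the indexing rules above, one could run:
print(X_train[0].shape)        # (48, 48, 1) - one image.
print(X_train[0, 0].shape)     # (48, 1)     - one row of one image.
print(X_train[0, 0, 0].shape)  # (1,)        - one pixel's channel vector.
print(X_train[0, 0, 0, 0])     # A single pixel value (scalar).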
early_stopping = EarlyStopping(monitor = "val_accuracy",
patience = 10,
mode = "auto",
restore_best_weights = True)
# No patience is given, so the Keras default (patience = 10 epochs) applies before the rate is halved.
reduce_learning_rate = ReduceLROnPlateau(monitor = "val_accuracy",
                                         factor = 0.5,
                                         verbose = 1,
                                         min_lr = 0.00001)
# Create the neural network: a CNN feature extractor followed by stacked LSTMs and a softmax classifier.
model = models.Sequential()
model.add(Input((48, 48, 1)))
# Convolutional block 1.
model.add(layers.Conv2D(32, kernel_size=(3, 3), strides=(1, 1), padding="valid"))
model.add(layers.BatchNormalization(axis=3))
model.add(layers.Activation("relu"))
model.add(layers.Conv2D(64, (3, 3), strides=(1, 1), padding="same"))
model.add(layers.BatchNormalization(axis=3))
model.add(layers.Activation("relu"))
model.add(layers.MaxPooling2D((2, 2)))
# Convolutional block 2.
model.add(layers.Conv2D(64, (3, 3), strides=(1, 1), padding="valid"))
model.add(layers.BatchNormalization(axis=3))
model.add(layers.Activation("relu"))
model.add(layers.Conv2D(128, (3, 3), strides=(1, 1), padding="same"))
model.add(layers.BatchNormalization(axis=3))
model.add(layers.Activation("relu"))
model.add(layers.MaxPooling2D((2, 2)))
# Convolutional block 3.
model.add(layers.Conv2D(128, (3, 3), strides=(1, 1), padding="valid"))
model.add(layers.BatchNormalization(axis=3))
model.add(layers.Activation("relu"))
model.add(layers.MaxPooling2D((2, 2)))
# Read the 4x4x128 feature maps as a 16-step sequence of 128-dim vectors for the LSTM.
model.add(layers.Reshape((-1, 128)))
model.add(layers.LSTM(128))
# Re-read the first LSTM's 128-dim output as a 2-step sequence of 64-dim vectors.
model.add(layers.Reshape((-1, 64)))
model.add(layers.LSTM(64))
# Classifier head: dense layer with dropout, then softmax over the 7 emotions.
model.add(layers.Dense(200, activation="relu"))
model.add(layers.Dropout(0.6))
model.add(layers.Dense(7, activation="softmax"))
adam_optimizer = Adam(learning_rate=0.0002)
model.compile(optimizer = adam_optimizer, loss = "categorical_crossentropy", metrics=["accuracy"])
print(model.summary())
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d (Conv2D) (None, 46, 46, 32) 320 batch_normalization (BatchN (None, 46, 46, 32) 128 ormalization) activation (Activation) (None, 46, 46, 32) 0 conv2d_1 (Conv2D) (None, 46, 46, 64) 18496 batch_normalization_1 (Batc (None, 46, 46, 64) 256 hNormalization) activation_1 (Activation) (None, 46, 46, 64) 0 max_pooling2d (MaxPooling2D (None, 23, 23, 64) 0 ) conv2d_2 (Conv2D) (None, 21, 21, 64) 36928 batch_normalization_2 (Batc (None, 21, 21, 64) 256 hNormalization) activation_2 (Activation) (None, 21, 21, 64) 0 conv2d_3 (Conv2D) (None, 21, 21, 128) 73856 batch_normalization_3 (Batc (None, 21, 21, 128) 512 hNormalization) activation_3 (Activation) (None, 21, 21, 128) 0 max_pooling2d_1 (MaxPooling (None, 10, 10, 128) 0 2D) conv2d_4 (Conv2D) (None, 8, 8, 128) 147584 batch_normalization_4 (Batc (None, 8, 8, 128) 512 hNormalization) activation_4 (Activation) (None, 8, 8, 128) 0 max_pooling2d_2 (MaxPooling (None, 4, 4, 128) 0 2D) reshape (Reshape) (None, 16, 128) 0 lstm (LSTM) (None, 128) 131584 reshape_1 (Reshape) (None, 2, 64) 0 lstm_1 (LSTM) (None, 64) 33024 dense (Dense) (None, 200) 13000 dropout (Dropout) (None, 200) 0 dense_1 (Dense) (None, 7) 1407 ================================================================= Total params: 457,863 Trainable params: 457,031 Non-trainable params: 832 _________________________________________________________________ None
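The two Reshape layers in the summary deserve a note: the final 4x4x128 feature maps are unrolled into a 16-step sequence of 128-dim vectors, and the first LSTM's 128-dim output is then re-read as a 2-step sequence of 64-dim vectors. A minimal NumPy sketch (toy zero tensors, not the real activations) of the same shape arithmetic:
feature_maps = np.zeros((1, 4, 4, 128))      # Shape of the CNN output for one sample.
sequence = feature_maps.reshape(1, -1, 128)  # (1, 16, 128): 16 "time steps" of 128 features.
lstm_state = np.zeros((1, 128))              # The first LSTM returns its final 128-dim state.
sequence_2 = lstm_state.reshape(1, -1, 64)   # (1, 2, 64): 2 steps of 64 features for the second LSTM.
print(sequence.shape, sequence_2.shape)      # (1, 16, 128) (1, 2, 64)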
plot_model(model, to_file = "fervi_model.png", show_shapes = True, show_dtype = True)
# Convert the integer target vector y_train into a one-hot encoded matrix.
# 7 is the number of emotion classes.
# 0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral
y_train = to_categorical(y_train, 7)
y_train.shape
(49996, 7)
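For illustration, a minimal sketch of what to_categorical does to a toy label vector:
print(to_categorical([0, 3, 6], 7))
# [[1. 0. 0. 0. 0. 0. 0.]
#  [0. 0. 0. 1. 0. 0. 0.]
#  [0. 0. 0. 0. 0. 0. 1.]]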
# Convert the integer target vector y_valid into a one-hot encoded matrix.
# 7 is the number of emotion classes.
# 0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral
y_valid = to_categorical(y_valid, 7)
y_valid.shape
(6250, 7)
# Convert the integer target vector y_test into a one-hot encoded matrix.
# 7 is the number of emotion classes.
# 0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral
y_test = to_categorical(y_test, 7)
y_test.shape
(6250, 7)
history_callback_object = model.fit(x=X_train, y=y_train, epochs = 100, callbacks=[early_stopping, reduce_learning_rate], validation_data=(X_valid, y_valid))
Epoch 1/100 1563/1563 [==============================] - 30s 13ms/step - loss: 1.5329 - accuracy: 0.4057 - val_loss: 1.2963 - val_accuracy: 0.5045 - lr: 2.0000e-04 Epoch 2/100 1563/1563 [==============================] - 19s 12ms/step - loss: 1.0955 - accuracy: 0.5793 - val_loss: 1.1046 - val_accuracy: 0.5662 - lr: 2.0000e-04 Epoch 3/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.9284 - accuracy: 0.6485 - val_loss: 1.0462 - val_accuracy: 0.6122 - lr: 2.0000e-04 Epoch 4/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.8116 - accuracy: 0.6998 - val_loss: 0.9215 - val_accuracy: 0.6555 - lr: 2.0000e-04 Epoch 5/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.7030 - accuracy: 0.7468 - val_loss: 0.8160 - val_accuracy: 0.7045 - lr: 2.0000e-04 Epoch 6/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.6009 - accuracy: 0.7891 - val_loss: 0.8139 - val_accuracy: 0.6926 - lr: 2.0000e-04 Epoch 7/100 1563/1563 [==============================] - 20s 13ms/step - loss: 0.5146 - accuracy: 0.8233 - val_loss: 0.7880 - val_accuracy: 0.7203 - lr: 2.0000e-04 Epoch 8/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.4340 - accuracy: 0.8522 - val_loss: 0.8861 - val_accuracy: 0.7110 - lr: 2.0000e-04 Epoch 9/100 1563/1563 [==============================] - 20s 13ms/step - loss: 0.3690 - accuracy: 0.8773 - val_loss: 1.0045 - val_accuracy: 0.7101 - lr: 2.0000e-04 Epoch 10/100 1563/1563 [==============================] - 20s 13ms/step - loss: 0.3182 - accuracy: 0.8943 - val_loss: 0.8568 - val_accuracy: 0.7360 - lr: 2.0000e-04 Epoch 11/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.2737 - accuracy: 0.9111 - val_loss: 1.0553 - val_accuracy: 0.6906 - lr: 2.0000e-04 Epoch 12/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.2412 - accuracy: 0.9203 - val_loss: 0.9602 - val_accuracy: 0.7462 - lr: 2.0000e-04 Epoch 13/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.2117 - accuracy: 0.9318 - val_loss: 0.8766 - val_accuracy: 0.7637 - lr: 2.0000e-04 Epoch 14/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.1918 - accuracy: 0.9394 - val_loss: 0.8893 - val_accuracy: 0.7730 - lr: 2.0000e-04 Epoch 15/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.1699 - accuracy: 0.9469 - val_loss: 0.9633 - val_accuracy: 0.7584 - lr: 2.0000e-04 Epoch 16/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.1524 - accuracy: 0.9523 - val_loss: 1.1303 - val_accuracy: 0.7403 - lr: 2.0000e-04 Epoch 17/100 1563/1563 [==============================] - 20s 12ms/step - loss: 0.1465 - accuracy: 0.9527 - val_loss: 0.9004 - val_accuracy: 0.7645 - lr: 2.0000e-04 Epoch 18/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.1346 - accuracy: 0.9570 - val_loss: 0.9466 - val_accuracy: 0.7824 - lr: 2.0000e-04 Epoch 19/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.1251 - accuracy: 0.9592 - val_loss: 0.9624 - val_accuracy: 0.7870 - lr: 2.0000e-04 Epoch 20/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.1182 - accuracy: 0.9632 - val_loss: 0.8937 - val_accuracy: 0.7848 - lr: 2.0000e-04 Epoch 21/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.1110 - accuracy: 0.9650 - val_loss: 0.9687 - val_accuracy: 0.7867 - lr: 2.0000e-04 Epoch 22/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.1008 - accuracy: 
0.9686 - val_loss: 0.9610 - val_accuracy: 0.7901 - lr: 2.0000e-04 Epoch 23/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.1006 - accuracy: 0.9684 - val_loss: 0.9554 - val_accuracy: 0.7829 - lr: 2.0000e-04 Epoch 24/100 1563/1563 [==============================] - 20s 13ms/step - loss: 0.0938 - accuracy: 0.9697 - val_loss: 1.0016 - val_accuracy: 0.8019 - lr: 2.0000e-04 Epoch 25/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0922 - accuracy: 0.9709 - val_loss: 0.8998 - val_accuracy: 0.8170 - lr: 2.0000e-04 Epoch 26/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0846 - accuracy: 0.9742 - val_loss: 1.0011 - val_accuracy: 0.8048 - lr: 2.0000e-04 Epoch 27/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0838 - accuracy: 0.9739 - val_loss: 1.2208 - val_accuracy: 0.7550 - lr: 2.0000e-04 Epoch 28/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0848 - accuracy: 0.9740 - val_loss: 1.1191 - val_accuracy: 0.7754 - lr: 2.0000e-04 Epoch 29/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0758 - accuracy: 0.9769 - val_loss: 0.9881 - val_accuracy: 0.8048 - lr: 2.0000e-04 Epoch 30/100 1563/1563 [==============================] - 20s 13ms/step - loss: 0.0782 - accuracy: 0.9751 - val_loss: 1.0138 - val_accuracy: 0.8034 - lr: 2.0000e-04 Epoch 31/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0688 - accuracy: 0.9782 - val_loss: 0.9861 - val_accuracy: 0.8107 - lr: 2.0000e-04 Epoch 32/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0736 - accuracy: 0.9770 - val_loss: 0.9553 - val_accuracy: 0.8166 - lr: 2.0000e-04 Epoch 33/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0635 - accuracy: 0.9800 - val_loss: 1.0335 - val_accuracy: 0.8110 - lr: 2.0000e-04 Epoch 34/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0696 - accuracy: 0.9780 - val_loss: 0.9436 - val_accuracy: 0.8226 - lr: 2.0000e-04 Epoch 35/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0609 - accuracy: 0.9813 - val_loss: 1.0996 - val_accuracy: 0.8040 - lr: 2.0000e-04 Epoch 36/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0587 - accuracy: 0.9814 - val_loss: 1.0307 - val_accuracy: 0.8192 - lr: 2.0000e-04 Epoch 37/100 1563/1563 [==============================] - 20s 13ms/step - loss: 0.0652 - accuracy: 0.9796 - val_loss: 1.0396 - val_accuracy: 0.8117 - lr: 2.0000e-04 Epoch 38/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0578 - accuracy: 0.9813 - val_loss: 0.9917 - val_accuracy: 0.8248 - lr: 2.0000e-04 Epoch 39/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0575 - accuracy: 0.9811 - val_loss: 1.0641 - val_accuracy: 0.8136 - lr: 2.0000e-04 Epoch 40/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0554 - accuracy: 0.9825 - val_loss: 1.0283 - val_accuracy: 0.8235 - lr: 2.0000e-04 Epoch 41/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0526 - accuracy: 0.9831 - val_loss: 1.0807 - val_accuracy: 0.8155 - lr: 2.0000e-04 Epoch 42/100 1563/1563 [==============================] - 20s 12ms/step - loss: 0.0565 - accuracy: 0.9822 - val_loss: 1.0070 - val_accuracy: 0.8222 - lr: 2.0000e-04 Epoch 43/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0504 - accuracy: 0.9843 - val_loss: 1.0256 - val_accuracy: 0.8189 - lr: 2.0000e-04 Epoch 44/100 1563/1563 
[==============================] - 19s 12ms/step - loss: 0.0498 - accuracy: 0.9846 - val_loss: 1.0875 - val_accuracy: 0.8206 - lr: 2.0000e-04 Epoch 45/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0495 - accuracy: 0.9845 - val_loss: 1.0870 - val_accuracy: 0.8182 - lr: 2.0000e-04 Epoch 46/100 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0501 - accuracy: 0.9835 - val_loss: 1.0730 - val_accuracy: 0.8245 - lr: 2.0000e-04 Epoch 47/100 1563/1563 [==============================] - 20s 13ms/step - loss: 0.0494 - accuracy: 0.9844 - val_loss: 1.0501 - val_accuracy: 0.8154 - lr: 2.0000e-04 Epoch 48/100 1562/1563 [============================>.] - ETA: 0s - loss: 0.0454 - accuracy: 0.9851 Epoch 48: ReduceLROnPlateau reducing learning rate to 9.999999747378752e-05. 1563/1563 [==============================] - 19s 12ms/step - loss: 0.0454 - accuracy: 0.9851 - val_loss: 1.0671 - val_accuracy: 0.8198 - lr: 2.0000e-04
file_path_history_callback_object = "/kaggle/working/trainHistoryDictionary"
with open(file_path_history_callback_object, "wb") as history_callback_object_file:
    pickle.dump(history_callback_object.history, history_callback_object_file)
# This can be used to reload the training history whenever necessary.
# Note: pickle.load returns the plain history dictionary, not a History callback object.
# with open(file_path_history_callback_object, "rb") as history_callback_object_file:
#     history_dict = pickle.load(history_callback_object_file)
file_path_model = "./model.h5"
# Save the architecture and weights of the model.
model.save(file_path_model)
# This is used to load the model whenever necessary.
#file_path_model = "./model.h5"
#model = load_model(file_path_model)
# Save model architecture to JSON.
model_json = model.to_json()
with open("model.json", "w") as json_file:
    json_file.write(model_json)
# Save weights to HDF5.
model.save_weights("model_weights.h5")
print("The model has been successfully saved. ")
The model has been successfully saved.
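Because the architecture (JSON) and weights (HDF5) were saved separately above, the model can also be restored without the combined .h5 file. A minimal sketch (commented out, like the other load snippets):
# from tensorflow.keras.models import model_from_json
# with open("model.json", "r") as json_file:
#     restored_model = model_from_json(json_file.read())
# restored_model.load_weights("model_weights.h5")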
fig = px.line(data_frame = history_callback_object.history,
y = ["loss", "val_loss"],
labels = {"index":"Epoch", "value":"Loss"},
title = "Loss vs Epoch")
fig.show()
history_callback_object.history.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy', 'lr'])
fig = px.line(data_frame = history_callback_object.history,
y = ["accuracy", "val_accuracy"],
labels = {"index":"Epoch", "value":"Accuracy"},
title = "Accuracy vs Epoch")
fig.show()
history_callback_object.history.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy', 'lr'])
fig, (ax1, ax2) = plt.subplots(1, 2)
ax1.plot(history_callback_object.history["loss"], label = "loss")
ax1.plot(history_callback_object.history["val_loss"], label = "val_loss")
ax1.legend()
ax1.set_xlabel("Epoch")
ax1.set_ylabel("Loss")
ax1.set_title("Loss vs Epoch")
ax1.grid(True)
ax2.plot(history_callback_object.history["accuracy"], label = "accuracy")
ax2.plot(history_callback_object.history["val_accuracy"], label = "val_accuracy")
ax2.legend()
ax2.set_xlabel("Epoch")
ax2.set_ylabel("Accuracy")
ax2.set_title("Accuracy vs Epoch")
ax2.grid(True)
fig.set_figheight(10)
fig.set_figwidth(35)
plt.show()
history_callback_object.history.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy', 'lr'])
# y_pred stores the probabilities for each emotion. The highest probability corresponds to the predicted emotion.
y_pred = model.predict(X_test)
print(y_pred[0])
y_pred
196/196 [==============================] - 2s 4ms/step [4.4812859e-07 9.9999940e-01 1.5306861e-07 6.5854424e-09 2.8696998e-09 4.3102496e-08 1.8255992e-09]
array([[4.4812859e-07, 9.9999940e-01, 1.5306861e-07, ..., 2.8696998e-09, 4.3102496e-08, 1.8255992e-09], [2.5550537e-06, 1.7579818e-11, 3.3049702e-07, ..., 3.1553802e-06, 2.2884456e-07, 2.1137349e-07], [4.0197290e-05, 9.9995780e-01, 1.5832669e-06, ..., 9.0113929e-08, 2.4097392e-07, 2.7453646e-08], ..., [9.2926160e-07, 9.2643428e-13, 9.0047145e-08, ..., 2.3080160e-07, 3.2290561e-08, 2.4787786e-07], [1.5925167e-06, 2.9844681e-08, 4.5191732e-06, ..., 8.6245282e-06, 4.7380272e-06, 9.9997699e-01], [1.1010153e-06, 9.9999869e-01, 2.0426337e-07, ..., 6.0741305e-09, 2.1756904e-08, 2.4073408e-09]], dtype=float32)
y_pred = np.argmax(y_pred, axis = 1)
print(y_pred[0])
y_pred
1
array([1, 3, 1, ..., 3, 6, 1])
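np.argmax simply returns the index of the largest probability in each row; in the first prediction above, index 1 (Disgust) holds ~0.9999994, so the predicted class is 1. A minimal toy sketch:
demo = np.array([[0.1, 0.7, 0.2]])  # Hypothetical probability row.
print(np.argmax(demo, axis = 1))    # [1]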
y_check = np.argmax(y_test, axis = 1)
print(y_check[0])
y_check
1
array([1, 3, 1, ..., 3, 6, 1])
print("Accuracy of model on testing data: ", model.evaluate(X_test, y_test)[1] * 100 , "%", sep = "")
196/196 [==============================] - 1s 5ms/step - loss: 0.9956 - accuracy: 0.8245 Accuracy of model on testing data: 82.44799971580505%
print("Accuracy of model on validation data: ", model.evaluate(X_valid, y_valid)[1] * 100 , "%", sep = "")
196/196 [==============================] - 1s 6ms/step - loss: 0.9917 - accuracy: 0.8248 Accuracy of model on validation data: 82.48000144958496%
model_loss, model_accuracy = model.evaluate(X_test, y_test)
print("Model Test Loss: %f" % (model_loss))
print("Model Test Loss: %f" % (model_loss * 100), "%\n", sep = "")
print("Model Test Accuracy: %f" % (model_accuracy))
print("Model Test Accuracy: %f" % (model_accuracy * 100), "%", sep = "")
196/196 [==============================] - 1s 6ms/step - loss: 0.9956 - accuracy: 0.8245 Model Test Loss: 0.995630 Model Test Loss: 99.562991% Model Test Accuracy: 0.824480 Model Test Accuracy: 82.448000%
# Calculate our accuracy score.
accuracy = accuracy_score(y_check, y_pred)
# Calculate our precision score.
precision = precision_score(y_check, y_pred, average = "micro")
# Calculate our recall score.
recall = recall_score(y_check, y_pred, average = "micro")
# Calculate our f1-score.
f1 = f1_score(y_check, y_pred, average = "micro")
# Print each of our scores to inspect performance.
print("Accuracy Score: %f" % (accuracy * 100), "%", sep = "")
print("Precision Score: %f" % (precision * 100), "%", sep = "")
print("Recall Score: %f" % (recall * 100), "%", sep = "")
print("F1 Score: %f" % (f1 * 100), "%", sep = "")
Accuracy Score: 82.448000% Precision Score: 82.448000% Recall Score: 82.448000% F1 Score: 82.448000%
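Note that all four scores are identical: for single-label multiclass predictions, micro-averaged precision, recall, and F1 each count the same per-sample hits and misses, so they all reduce to overall accuracy. For a view that weights every class equally, macro averaging could be used instead, e.g.:
print("Macro F1 Score: %f" % (f1_score(y_check, y_pred, average = "macro") * 100), "%", sep = "")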
fig = plt.figure(figsize=(10, 10))
plt.title("Confusion Matrix of Model")
plt.xlabel("Predicted")
plt.ylabel("Actual")
cm = multilabel_confusion_matrix(y_check, y_pred)
print(cm, "\n\n\n")
model_matrix = confusion_matrix(y_check, y_pred)
cm = pd.DataFrame(model_matrix,
                  index = list(emotions_classes_label),
                  columns = list(emotions_classes_label))
ax = sns.heatmap(cm,
                 annot = True,
                 fmt = "d",
                 cbar = True)
print(cm, "\n\n\n")
print("0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral\n")
plt.show()
[[[5143  230]
  [ 156  721]]
 [[5322   23]
  [   0  905]]
 [[5231  146]
  [ 243  630]]
 [[5162  170]
  [ 159  759]]
 [[5206  216]
  [ 248  580]]
 [[5224   93]
  [  46  887]]
 [[5115  219]
  [ 245  671]]]

          Angry  Disgust  Fear  Happy  Neutral  Sad  Surprise
Angry       721       10    26     23       52   10        35
Disgust       0      905     0      0        0    0         0
Fear         62        4   630     25       56   46        50
Happy        39        0    17    759       34   21        48
Neutral      63        4    58     42      580    3        78
Sad           4        2    15     11        6  887         8
Surprise     62        3    30     69       68   13       671

0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral
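One way to read the matrix above: dividing each diagonal entry by its row sum gives per-class recall (rows are actual classes). A minimal sketch using the model_matrix computed above:
per_class_recall = model_matrix.diagonal() / model_matrix.sum(axis = 1)
print(per_class_recall)  # e.g., Disgust is recalled perfectly (905/905).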
y_pred = model.predict(X_test)
y_result = []
y_actual = []
for pred in y_pred:
    y_result.append(np.argmax(pred))
for pred in y_test:
    y_actual.append(np.argmax(pred))
196/196 [==============================] - 1s 4ms/step
print("Model Classification Report\n", classification_report(y_actual, y_result))
Model Classification Report precision recall f1-score support 0 0.76 0.82 0.79 877 1 0.98 1.00 0.99 905 2 0.81 0.72 0.76 873 3 0.82 0.83 0.82 918 4 0.73 0.70 0.71 828 5 0.91 0.95 0.93 933 6 0.75 0.73 0.74 916 accuracy 0.82 6250 macro avg 0.82 0.82 0.82 6250 weighted avg 0.82 0.82 0.82 6250
print("0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral\n")
def display_images(images, true_labels, pred_labels, rows, cols):
    fig, axes = plt.subplots(rows, cols, figsize=(15, 50))
    for i, ax in enumerate(axes.flat):
        if i < len(images):
            # Drop the trailing channel axis: imshow expects a (48, 48) array for grayscale.
            ax.imshow(images[i].squeeze(), cmap='gray')
            true = np.argmax(true_labels[i])
            pred = np.argmax(pred_labels[i])
            ax.set_title(f"True: {emotions_classes_dict[true]}, Pred: {emotions_classes_dict[pred]}")
        # Hide the axis for every cell, including unused ones.
        ax.axis('off')
# Find indices of misclassified images by comparing predicted and true class labels
# (y_pred holds probability rows here, so compare argmaxes rather than the raw arrays).
misclassified_indices = np.where(np.argmax(y_pred, axis=1) != np.argmax(y_test, axis=1))[0]
# Keep every 7th misclassified image so the grid stays a manageable size.
misclassified_indices = misclassified_indices[::7]
# Display images.
display_images(X_test[misclassified_indices], y_test[misclassified_indices], y_pred[misclassified_indices], 30, 5)
plt.show()
0=Angry, 1=Disgust, 2=Fear, 3=Happy, 4=Sad, 5=Surprise, 6=Neutral
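Finally, a minimal sketch (commented out; face is a hypothetical 48x48 grayscale NumPy array with pixel values in [0, 255]) of how the trained model could classify a single new image:
# probs = model.predict(face.reshape(1, 48, 48, 1) / 255.0)
# print(emotions_classes_dict[np.argmax(probs)])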