#!/usr/bin/env python
# coding: utf-8

# `KDD2024 Tutorial / A Hands-On Introduction to Time Series Classification and Regression`
#
# # Deep learning based Time Series Machine Learning in `aeon`
#
# Deep learning has proven to be very effective for Time Series Classification (TSC) tasks, following the extensive experiments in [[1]](#references), especially convolution-based architectures (i.e. FCN and ResNet [[2]](#references)). A while later, InceptionTime (convolution based) was proposed and became the new state-of-the-art deep learning model for TSC [[3]](#references). In [[4]](#references), new hand-crafted convolution filters were proposed to boost InceptionTime. The model proposed in [[4]](#references), Hybrid InceptionTime (H-InceptionTime), is, at the time of writing, the state-of-the-art deep learning model for TSC following [[5]](#references). More recently, in the latest Time Series Extrinsic Regression (TSER) review [[6]](#references), InceptionTime was found to be the state-of-the-art deep learning model for TSER.
#
# In this notebook, we cover the usage of deep learning models for both TSC and TSER on EEG applications.
#
# All figures used in this demo are taken from the [Deep Learning for Time Series Classification webpage](https://msd-irimas.github.io/pages/dl4tsc/), with the following reference figure providing the legend:
#
# __Note:__ All deep learners in `aeon` are currently based on `tensorflow`. You will need to `pip install tensorflow` to run this code.

# Deep Legend.

# ## Table of Contents
#
# * [Load example data](#load-data)
# * [Simple Multilayer Perceptron (MLP)](#mlp)
#     * [Using MLP for Classification and Regression](#mlp-usage)
# * [Time Convolutional Neural Network (TimeCNN)](#time-cnn-network)
#     * [Using TimeCNN for Classification and Regression](#time-cnn-usage)
# * [Inception Based Models](#inception-models)
#     * [InceptionTime](#inceptiontime)
#         * [Using InceptionTime for Classification and Regression](#inceptiontime-usage)
#     * [Hybrid InceptionTime](#hinceptiontime)
#         * [Using Hybrid InceptionTime for Classification and Regression](#hinceptiontime-usage)
#     * [LITETime](#litetime)
#         * [Using LITETime for Classification and Regression](#litetime-usage)
# * [Performance on the UCR univariate classification datasets](#evaluation)
# * [References](#references)

# In[ ]:

get_ipython().system('pip install aeon==0.11.0 tensorflow')
get_ipython().system('mkdir -p data')
get_ipython().system('wget -nc https://raw.githubusercontent.com/aeon-tutorials/KDD-2024/main/Notebooks/data/KDD_MTSC_TRAIN.ts -P data/')
get_ipython().system('wget -nc https://raw.githubusercontent.com/aeon-tutorials/KDD-2024/main/Notebooks/data/KDD_MTSC_TEST.ts -P data/')
get_ipython().system('wget -nc https://raw.githubusercontent.com/aeon-tutorials/KDD-2024/main/Notebooks/data/KDD_UTSC_TRAIN.ts -P data/')
get_ipython().system('wget -nc https://raw.githubusercontent.com/aeon-tutorials/KDD-2024/main/Notebooks/data/KDD_UTSC_TEST.ts -P data/')
get_ipython().system('wget -nc https://raw.githubusercontent.com/aeon-tutorials/KDD-2024/main/Notebooks/data/KDD_MTSER_TRAIN.ts -P data/')
get_ipython().system('wget -nc https://raw.githubusercontent.com/aeon-tutorials/KDD-2024/main/Notebooks/data/KDD_MTSER_TEST.ts -P data/')
get_ipython().system('wget -nc https://raw.githubusercontent.com/aeon-tutorials/KDD-2024/main/Notebooks/data/KDD_UTSER_TRAIN.ts -P data/')
get_ipython().system('wget -nc https://raw.githubusercontent.com/aeon-tutorials/KDD-2024/main/Notebooks/data/KDD_UTSER_TEST.ts -P data/')
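# If `wget` is not available on your system (e.g. on Windows), the cell below is a
# minimal pure-Python alternative that fetches the same files; it assumes only the
# Python standard library and the URLs listed above.

# In[ ]:

import os
import urllib.request

base_url = "https://raw.githubusercontent.com/aeon-tutorials/KDD-2024/main/Notebooks/data/"
file_names = [
    "KDD_MTSC_TRAIN.ts", "KDD_MTSC_TEST.ts",
    "KDD_UTSC_TRAIN.ts", "KDD_UTSC_TEST.ts",
    "KDD_MTSER_TRAIN.ts", "KDD_MTSER_TEST.ts",
    "KDD_UTSER_TRAIN.ts", "KDD_UTSER_TEST.ts",
]
os.makedirs("data", exist_ok=True)
for file_name in file_names:
    destination = os.path.join("data", file_name)
    # mirror `wget -nc`: do not re-download files that already exist
    if not os.path.exists(destination):
        urllib.request.urlretrieve(base_url + file_name, destination)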
# In[1]:

# There are some deprecation warnings present in the notebook, we will ignore them.
# Remove this cell if you are interested in finding out what is changing soon:
# there will be big changes in our v1.0.0 release of aeon!

import warnings

warnings.filterwarnings("ignore", category=FutureWarning)

# In[2]:

from aeon.registry import all_estimators

all_estimators(
    "classifier", filter_tags={"algorithm_type": "deeplearning"}, as_dataframe=True
)

# In[3]:

all_estimators(
    "regressor", filter_tags={"algorithm_type": "deeplearning"}, as_dataframe=True
)

# ## Load example data

# ### EEG Classification

# In[4]:

from aeon.datasets import load_from_tsfile

X_train_c, y_train_c = load_from_tsfile("./data/KDD_UTSC_TRAIN.ts")
X_test_c, y_test_c = load_from_tsfile("./data/KDD_UTSC_TEST.ts")

# z-normalize each series to zero mean and unit variance
X_train_c = (X_train_c - X_train_c.mean(axis=-1, keepdims=True)) / (
    X_train_c.std(axis=-1, keepdims=True)
)
X_test_c = (X_test_c - X_test_c.mean(axis=-1, keepdims=True)) / (
    X_test_c.std(axis=-1, keepdims=True)
)

print("Train shape:", X_train_c.shape)
print("Test shape:", X_test_c.shape)

# In[5]:

from aeon.visualisation import plot_collection_by_class

plot_collection_by_class(X_train_c[:, 0, :], y_train_c)

# ### EEG Regression

# In[6]:

X_train_r, y_train_r = load_from_tsfile("./data/KDD_UTSER_TRAIN.ts")
X_test_r, y_test_r = load_from_tsfile("./data/KDD_UTSER_TEST.ts")

# z-normalize each series to zero mean and unit variance
X_train_r = (X_train_r - X_train_r.mean(axis=-1, keepdims=True)) / (
    X_train_r.std(axis=-1, keepdims=True)
)
X_test_r = (X_test_r - X_test_r.mean(axis=-1, keepdims=True)) / (
    X_test_r.std(axis=-1, keepdims=True)
)

print("Train shape:", X_train_r.shape)
print("Test shape:", X_test_r.shape)

# In[7]:

from matplotlib import pyplot as plt

plt.plot(X_train_r[:5, 0, :].T)
plt.title("EEG Regression Samples")
plt.xlabel("Time")
plt.ylabel("Value")
plt.show()

# # Simple Multilayer Perceptron (MLP)
#
# The Multilayer Perceptron (MLP) [[2]](#references), the simplest neural network model, is often used as a baseline for deep learning models. However, when applied to time series data, it fails to account for temporal dependencies. The MLP tries to find the optimal non-linear combination of input features to perform well on a downstream task.

# mlp

# ## Using MLP for Classification and Regression

# In[8]:

from aeon.classification.deep_learning import MLPClassifier
from sklearn.metrics import accuracy_score

# n_epochs sets the number of training iterations
mlp_cls = MLPClassifier(n_epochs=100)
mlp_cls.fit(X_train_c, y_train_c)

mlp_cls_preds = mlp_cls.predict(X_test_c)
print(accuracy_score(y_test_c, mlp_cls_preds))

# In[9]:

from aeon.regression.deep_learning import MLPRegressor
from sklearn.metrics import mean_squared_error

mlp_rgs = MLPRegressor(n_epochs=500)
mlp_rgs.fit(X_train_r, y_train_r)

mlp_rgs_preds = mlp_rgs.predict(X_test_r)
print(mean_squared_error(y_test_r, mlp_rgs_preds))

# In[10]:

from aeon.visualisation import plot_scatter_predictions

plot_scatter_predictions(y_test_r, mlp_rgs_preds, title="MLP Predictions")
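# Beyond the mean squared error above, other `scikit-learn` regression metrics can
# be applied to the same predictions. A minimal sketch (the RMSE is computed by
# hand with `numpy` so it works across `scikit-learn` versions):

# In[ ]:

import numpy as np
from sklearn.metrics import mean_absolute_error, mean_squared_error

# root mean squared error and mean absolute error of the MLP regressor
rmse = np.sqrt(mean_squared_error(y_test_r, mlp_rgs_preds))
mae = mean_absolute_error(y_test_r, mlp_rgs_preds)
print(f"MLP RMSE: {rmse:.4f}, MAE: {mae:.4f}")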
# ## The Time Convolutional Neural Network (TimeCNN)
#
# The Time Convolutional Neural Network (TimeCNN) [[7]](#references) is a convolutional neural network where the optimization process focuses on learning the most effective filters to achieve optimal results. Unlike the MLP, TimeCNN uses local convolutions, allowing it to capture temporal dependencies in the data and making it more suitable for time series analysis.

# cnn

# ## Using TimeCNN for Classification and Regression

# In[11]:

from aeon.classification.deep_learning import CNNClassifier
from sklearn.metrics import accuracy_score

# n_epochs sets the number of training iterations
# verbose shows the network's details and the training logs
cnn_cls = CNNClassifier(
    n_epochs=500, verbose=True, save_best_model=True, best_file_name="best_cnn"
)
cnn_cls.fit(X_train_c, y_train_c)

cnn_cls_preds = cnn_cls.predict(X_test_c)
print(accuracy_score(y_test_c, cnn_cls_preds))

# In[12]:

import matplotlib.pyplot as plt

# you can retrieve the training history to visualize the loss
loss = cnn_cls.history.history["loss"]

plt.plot(loss, color="blue")
plt.xlabel("Epoch")
plt.ylabel("Loss")
plt.title("Evolution of the training loss.")
plt.show()

# In[13]:

import os

import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf

cnn_best = tf.keras.models.load_model("best_cnn.keras", compile=False)

# get the conv weights of layer 1 (replace 0 by 1 to get the bias instead)
conv1_weights = cnn_best.layers[1].get_weights()[0]
# get the conv weights of the second conv block (in TimeCNN, this is layer 3)
conv2_weights = cnn_best.layers[3].get_weights()[0]

fig, ax = plt.subplots(2, 6, figsize=(20, 7))

cmap = plt.get_cmap("tab20c")
values = np.linspace(0, 1, 18)
np.random.shuffle(values)

for i in range(6):
    ax[0, i].plot(conv1_weights[:, 0, i], color=cmap(values[i]))
    ax[0, i].set_title("Conv layer 1, Filter " + str(i + 1))

    ax[1, i].plot(conv2_weights[:, 0, i], color=cmap(values[6 + i]))
    ax[1, i].set_title("Conv layer 2, Filter " + str(i + 1))

plt.tight_layout()
plt.show()

os.remove("best_cnn.keras")

# In[14]:

from aeon.regression.deep_learning import CNNRegressor
from sklearn.metrics import mean_squared_error

cnn_rgs = CNNRegressor(n_epochs=500)
cnn_rgs.fit(X_train_r, y_train_r)

cnn_rgs_preds = cnn_rgs.predict(X_test_r)
print(mean_squared_error(y_test_r, cnn_rgs_preds))

# In[15]:

from aeon.visualisation import plot_scatter_predictions

plot_scatter_predictions(y_test_r, cnn_rgs_preds, title="TimeCNN Predictions")

# ## Inception Based Models
#
# __Warning:__ The following models are computationally expensive and may take a long time to train on some setups (especially without a GPU).

# ### InceptionTime
#
# InceptionTime [[3]](#references) is an ensemble of multiple Inception models, a convolution-based neural network that improves over TimeCNN by applying several convolution layers with different characteristics in parallel, and by using residual connections to avoid the vanishing gradient issue.

# inctime
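# To make the parallel-convolution idea concrete, the cell below is a minimal
# Keras sketch of an Inception-style module. It is not aeon's exact
# implementation: the kernel sizes, filter counts and series length are
# illustrative assumptions.

# In[ ]:

import tensorflow as tf


def inception_module_sketch(inputs, n_filters=32):
    # three parallel convolutions with different kernel sizes capture
    # patterns at multiple time scales (kernel sizes are illustrative)
    branches = [
        tf.keras.layers.Conv1D(
            n_filters, kernel_size=k, padding="same", use_bias=False
        )(inputs)
        for k in (10, 20, 40)
    ]
    # a max-pooling branch followed by a bottleneck convolution
    pool = tf.keras.layers.MaxPooling1D(pool_size=3, strides=1, padding="same")(inputs)
    branches.append(
        tf.keras.layers.Conv1D(n_filters, kernel_size=1, padding="same", use_bias=False)(pool)
    )
    # concatenate all branches along the channel axis
    x = tf.keras.layers.Concatenate(axis=-1)(branches)
    x = tf.keras.layers.BatchNormalization()(x)
    return tf.keras.layers.Activation("relu")(x)


# build a toy model on univariate series of length 100 and inspect it
inp = tf.keras.layers.Input(shape=(100, 1))
out = inception_module_sketch(inp)
tf.keras.Model(inputs=inp, outputs=out).summary()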
# #### Using InceptionTime for Classification and Regression

# In[16]:

from aeon.classification.deep_learning import InceptionTimeClassifier
from sklearn.metrics import accuracy_score

inceptiontime_cls = InceptionTimeClassifier(n_epochs=500)
inceptiontime_cls.fit(X_train_c, y_train_c)

inceptiontime_cls_preds = inceptiontime_cls.predict(X_test_c)
print("InceptionTime:", accuracy_score(y_test_c, inceptiontime_cls_preds))

# In[17]:

from aeon.regression.deep_learning import InceptionTimeRegressor
from sklearn.metrics import mean_squared_error

inceptiontime_rgs = InceptionTimeRegressor(n_epochs=500)
inceptiontime_rgs.fit(X_train_r, y_train_r)

inceptiontime_rgs_preds = inceptiontime_rgs.predict(X_test_r)
print("InceptionTime:", mean_squared_error(y_test_r, inceptiontime_rgs_preds))

# In[18]:

from aeon.visualisation import plot_scatter_predictions

plot_scatter_predictions(
    y_test_r, inceptiontime_rgs_preds, title="InceptionTime Regressor Predictions"
)

# ### Hybrid InceptionTime
#
# H-InceptionTime [[4]](#references) builds on the InceptionTime model by adding hand-crafted convolution filters at the beginning of the model. This feature engineering technique helps the model generalize better to unseen cases.

# hcf

# #### Using Hybrid InceptionTime for Classification and Regression

# In[19]:

from aeon.classification.deep_learning import InceptionTimeClassifier
from sklearn.metrics import accuracy_score

h_inceptiontime_cls = InceptionTimeClassifier(n_epochs=500, use_custom_filters=True)
h_inceptiontime_cls.fit(X_train_c, y_train_c)

h_inceptiontime_cls_preds = h_inceptiontime_cls.predict(X_test_c)
print("H-InceptionTime:", accuracy_score(y_test_c, h_inceptiontime_cls_preds))

# In[20]:

from aeon.regression.deep_learning import InceptionTimeRegressor
from sklearn.metrics import mean_squared_error

h_inceptiontime_rgs = InceptionTimeRegressor(n_epochs=20, use_custom_filters=True)
h_inceptiontime_rgs.fit(X_train_r, y_train_r)

h_inceptiontime_rgs_preds = h_inceptiontime_rgs.predict(X_test_r)
print("H-InceptionTime:", mean_squared_error(y_test_r, h_inceptiontime_rgs_preds))

# In[21]:

from aeon.visualisation import plot_scatter_predictions

plot_scatter_predictions(
    y_test_r, h_inceptiontime_rgs_preds, title="H-InceptionTime Regressor Predictions"
)

# ### LITETime
#
# LITETime [[8]](#references) is currently the smallest deep learning model that achieves state-of-the-art performance on TSC. It is based on the Inception architecture but has far fewer parameters to train, while also utilizing the hand-crafted convolution filters in the first layer.

# lite
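# Since LITETime's selling point is its small size, it is instructive to compare
# parameter counts across models. The sketch below assumes that aeon's
# single-network deep learners keep the trained Keras model in a `model_`
# attribute (this may vary across aeon versions); ensembles such as
# InceptionTime and LITETime wrap several such networks, so they are omitted.

# In[ ]:

# compare the number of parameters of the fitted single-network models
for name, estimator in [("MLP", mlp_cls), ("TimeCNN", cnn_cls)]:
    print(f"{name}: {estimator.model_.count_params():,} parameters")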
# #### Using LITETime for Classification and Regression

# In[22]:

from aeon.classification.deep_learning import LITETimeClassifier
from sklearn.metrics import accuracy_score

litetime_cls = LITETimeClassifier(n_epochs=500)
litetime_cls.fit(X_train_c, y_train_c)

litetime_cls_preds = litetime_cls.predict(X_test_c)
print("LITETime:", accuracy_score(y_test_c, litetime_cls_preds))

# In[23]:

from aeon.regression.deep_learning import LITETimeRegressor
from sklearn.metrics import mean_squared_error

litetime_rgs = LITETimeRegressor(n_epochs=500)
litetime_rgs.fit(X_train_r, y_train_r)

litetime_rgs_preds = litetime_rgs.predict(X_test_r)
print("LITETime:", mean_squared_error(y_test_r, litetime_rgs_preds))

# In[24]:

from aeon.visualisation import plot_scatter_predictions

plot_scatter_predictions(y_test_r, litetime_rgs_preds, title="LITETime Regressor Predictions")

# ## Performance on the UCR univariate classification datasets
#
# Below we show the performance of the `CNN` (TimeCNN), `InceptionTime`, `H-InceptionTime` and `LITETime` deep learning models, alongside the `1NN-DTW` baseline, on the UCR TSC archive datasets, using results from the 2024 TSC bake off [[5]](#references).

# In[25]:

from aeon.benchmarking import get_estimator_results_as_array
from aeon.datasets.tsc_datasets import univariate

names = ["CNN", "InceptionTime", "H-InceptionTime", "LITETime", "1NN-DTW"]

results, present_names = get_estimator_results_as_array(
    names, univariate, include_missing=False
)
results.shape

# In[26]:

from aeon.visualisation import plot_boxplot_median, plot_critical_difference

plot_critical_difference(results, names)
plot_boxplot_median(results, names, plot_type="boxplot")

# ## References
#
# [1] Ismail Fawaz, Hassan, et al. "Deep learning for time series classification: a review." Data Mining and Knowledge Discovery 33.4 (2019): 917-963.
#
# [2] Wang, Zhiguang, et al. "Time series classification from scratch with deep neural networks: A strong baseline." 2017 International Joint Conference on Neural Networks (IJCNN). IEEE, 2017.
#
# [3] Ismail Fawaz, Hassan, et al. "InceptionTime: Finding AlexNet for time series classification." Data Mining and Knowledge Discovery 34.6 (2020): 1936-1962.
#
# [4] Ismail-Fawaz, Ali, et al. "Deep learning for time series classification using new hand-crafted convolution filters." 2022 IEEE International Conference on Big Data. IEEE, 2022.
#
# [5] Middlehurst, Matthew, Patrick Schäfer, and Anthony Bagnall. "Bake off redux: a review and experimental evaluation of recent time series classification algorithms." Data Mining and Knowledge Discovery (2024): 1-74.
#
# [6] Guijo-Rubio, David, et al. "Unsupervised feature based algorithms for time series extrinsic regression." arXiv preprint arXiv:2305.01429 (2023).
#
# [7] Zhao, Bendong, et al. "Convolutional neural networks for time series classification." Journal of Systems Engineering and Electronics 28.1 (2017).
#
# [8] Ismail-Fawaz, Ali, et al. "LITE: Light Inception with boosTing tEchniques for time series classification." 2023 IEEE 10th International Conference on Data Science and Advanced Analytics (DSAA). IEEE, 2023.
#
# [Return to Table of Contents](#toc)