import pandas as pd
import numpy as np
import sklearn
from sklearn import preprocessing
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
import keras
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout, LSTM
from sklearn.metrics import mean_squared_error
from sklearn.utils import shuffle
%matplotlib inline
dataframe = pd.read_csv("../datasets/sine-wave.csv")
plt.figure(figsize=(10,5))
plt.plot(dataframe)
plt.title("Sine Wave")
We use a moving forward window of size 50: the first 50 data points serve as our input X to predict y1, the 51st data point. Next, the window covering data points 2 through 51 becomes the input X to predict y2, the 52nd data point, and so on.
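As a minimal sketch of this windowing idea (on a toy NumPy array, not the actual dataset):

import numpy as np
series = np.arange(100)                    # toy stand-in for the sine-wave values
window_size = 50
X, y = [], []
for i in range(len(series) - window_size):
    X.append(series[i:i + window_size])    # points i .. i+49 form one input window
    y.append(series[i + window_size])      # point i+50 is the target
X, y = np.array(X), np.array(y)
print(X.shape, y.shape)                    # (50, 50) (50,)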
plt.figure(figsize=(10,5))
plt.plot(dataframe[:50])
plt.title("first 50 points")
Next, we prepare the dataset. This includes:
i. Normalizing the feature values
ii. Converting the dataset into a time series (windows of a fixed number of steps)
iii. Splitting the dataset into training and test sets
# Preparing the dataset
# Normalize the values between -1 and 1.
# The .fit_transform function finds the minimum and maximum values in the data and scales accordingly.
# Since we normalize the values between -1 and 1, the predictions also come out in that range.
# If needed, the transform can be inverted by calling the inverse_transform() function.
# This is useful for converting predictions back into their original scale for reporting or plotting.
scaler = preprocessing.MinMaxScaler(feature_range=(-1,1))
scaled_data = scaler.fit_transform(dataframe.values)
scaled_dataframe = pd.DataFrame(scaled_data)
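As a quick sanity check of the scaler round trip (a minimal sketch on toy values, not the actual dataset):

demo = np.array([[0.0], [5.0], [10.0]])
demo_scaler = preprocessing.MinMaxScaler(feature_range=(-1, 1))
demo_scaled = demo_scaler.fit_transform(demo)               # [[-1.], [0.], [1.]]
print(demo_scaler.inverse_transform(demo_scaled).ravel())   # [ 0.  5. 10.]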
# Window size
# Fix the moving window size at 50.
# We use the pandas shift function, which shifts an entire column by the number of steps we specify.
# We shift the column up (hence the negative argument, since we want future values as targets;
# a positive argument would shift it down instead) and concatenate the result to the original data.
window_size = 50
copied_dataframe = scaled_dataframe.copy()
for i in range(window_size):
    scaled_dataframe = pd.concat([scaled_dataframe, copied_dataframe.shift(-(i + 1))], axis=1)
scaled_dataframe
[Output: the windowed DataFrame, 5000 rows × 51 columns. Each row holds 51 consecutive scaled values (a 50-point window plus its target); the last 50 rows contain NaN in the shifted columns because no future values remain to fill them.]
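To see what shift(-1) does in isolation, here is a toy example (illustrative, not part of the original data):

s = pd.Series([10, 20, 30, 40])
print(s.shift(-1).tolist())  # [20.0, 30.0, 40.0, nan] -- each value moves up one row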
# Remove missing values
scaled_dataframe.dropna(axis=0, inplace=True)
scaled_dataframe
[Output: the same DataFrame after dropping missing values, 4950 rows × 51 columns — the 50 rows containing NaN are gone.]
split_data = int(round(0.8*scaled_dataframe.shape[0]))
split_data
3960
train_data = scaled_dataframe.iloc[:split_data,:]
test_data = scaled_dataframe.iloc[split_data:,:]
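# Shuffle only the training windows (each row is a self-contained sample);
# the test set keeps its chronological order so predictions can be plotted against time.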
train_data = shuffle(train_data)
# The last column is the output (label); the first 50 columns are the input features
train_data_x = train_data.iloc[:,:-1]
train_data_y = train_data.iloc[:,-1]
test_data_x = test_data.iloc[:,:-1]
test_data_y = test_data.iloc[:,-1]
print "train features shape ", train_data_x.shape
print "train label shape ", train_data_y.shape
print "test data y ", test_data_y.shape
train features shape  (3960, 50)
train label shape  (3960,)
test data y  (990,)
# Reshape for the LSTM: Keras LSTM layers expect 3D input of shape (samples, timesteps, features)
train_data_x = np.array(train_data_x)
test_data_x = np.array(test_data_x)
# print train_data_x.shape[0], train_data_x.shape[1]
train_data_x = train_data_x.reshape(train_data_x.shape[0],train_data_x.shape[1],1)
test_data_x = test_data_x.reshape(test_data_x.shape[0],test_data_x.shape[1],1)
print "Reshape train example x ", train_data_x.shape
print "Reshape test example x ", test_data_x.shape
input_shape = (50,1)
output = 50
Reshape train example x  (3960, 50, 1)
Reshape test example x  (990, 50, 1)
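# Model: two stacked LSTM layers with dropout, followed by a single linear output unit for one-step regression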
model = Sequential()
model.add(LSTM(units=output, input_shape=input_shape, return_sequences=True))
model.add(Dropout(0.25))
model.add(LSTM(256))
model.add(Dropout(0.25))
model.add(Dense(1))
model.add(Activation("linear"))
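# Note: 'accuracy' is not a meaningful metric for a regression task; the mse loss is what matters here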
model.compile(loss="mse", optimizer="adam", metrics=['accuracy'])
model.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
lstm_1 (LSTM)                (None, 50, 50)            10400
_________________________________________________________________
dropout_1 (Dropout)          (None, 50, 50)            0
_________________________________________________________________
lstm_2 (LSTM)                (None, 256)               314368
_________________________________________________________________
dropout_2 (Dropout)          (None, 256)               0
_________________________________________________________________
dense_1 (Dense)              (None, 1)                 257
_________________________________________________________________
activation_1 (Activation)   (None, 1)                  0
=================================================================
Total params: 325,025
Trainable params: 325,025
Non-trainable params: 0
_________________________________________________________________
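As a sanity check on these numbers: an LSTM layer has 4 × ((input_dim + units) × units + units) parameters, so lstm_1 has 4 × ((1 + 50) × 50 + 50) = 10,400, lstm_2 has 4 × ((50 + 256) × 256 + 256) = 314,368, and dense_1 has 256 × 1 + 1 = 257, for a total of 325,025.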
# Train
model.fit(train_data_x,train_data_y,batch_size=512,epochs=6,validation_split=0.1)
Train on 3564 samples, validate on 396 samples
Epoch 1/6
3564/3564 [==============================] - 20s 6ms/step - loss: 0.2740 - acc: 0.0084 - val_loss: 0.0724 - val_acc: 0.0202
Epoch 2/6
3564/3564 [==============================] - 20s 6ms/step - loss: 0.0369 - acc: 0.0202 - val_loss: 0.0047 - val_acc: 0.0202
Epoch 3/6
3564/3564 [==============================] - 17s 5ms/step - loss: 0.0144 - acc: 0.0202 - val_loss: 0.0017 - val_acc: 0.0202
Epoch 4/6
3564/3564 [==============================] - 16s 5ms/step - loss: 0.0079 - acc: 0.0202 - val_loss: 0.0045 - val_acc: 0.0202
Epoch 5/6
3564/3564 [==============================] - 20s 5ms/step - loss: 0.0075 - acc: 0.0202 - val_loss: 0.0020 - val_acc: 0.0202
Epoch 6/6
3564/3564 [==============================] - 18s 5ms/step - loss: 0.0055 - acc: 0.0202 - val_loss: 4.5101e-04 - val_acc: 0.0202
<keras.callbacks.History at 0x7f3550bebcd0>
predicted_output = model.predict(test_data_x)
# Convert predictions back into their original scale for reporting and plotting
# print "predicted output shape ", predicted_output.shape
predicted_output_scaled = scaler.inverse_transform(predicted_output)
# print predicted_output.shape
# Convert the actual test labels back to their original scale as well
test_data_y = test_data_y.values.reshape(-1, 1)
# print test_data_y.shape
actual_output_scaled = scaler.inverse_transform(test_data_y)
# actual_output.shape
# model.evaluate(test_data_x, test_data_y)
print "mean square error ", mean_squared_error(actual_output_scaled,predicted_output_scaled)
mean square error 0.00045264009063750757
plt.figure(figsize=(15,7))
plt.plot(actual_output_scaled, '-b', label='actual output')
# Visualize only the first 200 predicted points, since the predicted and actual outputs overlap almost completely
plt.plot(predicted_output_scaled[:200], '-r', label='predicted output')
plt.title("Actual and predicted output for sine wave")
plt.legend(loc='upper left')
<matplotlib.legend.Legend at 0x7f34f8230810>
So far, we have only checked whether the model has learned the training data: the plot above shows that it can reproduce output very close to the original. Now we will use the LSTM to predict future output.
For this, we use the moving forward window method: we start with the first 50 test points as the input X and predict the 51st point. The predicted value is then appended to the end of the window and the oldest point is dropped, so the next 50-point window (now containing one prediction) is used to predict the 52nd point, and so on.
# print test_data_x[0]
def movingTestWindowPrediction(number_of_future_predictions, test_data_x):
    preds_moving = []                                   # store the prediction made on each window
    moving_test_window = [test_data_x[0, :].tolist()]   # start from the first test window (50 points)
    moving_test_window = np.array(moving_test_window)   # shape (1, 50, 1)
    for i in range(number_of_future_predictions):
        predicted_one_step = model.predict(moving_test_window)  # predict the next single point
        preds_moving.append(predicted_one_step[0][0])
        predicted_one_step = predicted_one_step.reshape(1, 1, 1)
        # Slide the window while keeping its size at 50: drop the first element and
        # append the new prediction at the end (concatenating the 3D arrays along the time axis)
        moving_test_window = np.concatenate((moving_test_window[:, 1:, :], predicted_one_step), axis=1)
    preds_moving = np.array(preds_moving)
    preds_moving = preds_moving.reshape(preds_moving.shape[0], 1)
    preds_moving = scaler.inverse_transform(preds_moving)  # back to the original scale
    return preds_moving
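Note that this is recursive forecasting: the model's own outputs are fed back in as inputs, so prediction errors can compound over the 500 steps. The clean periodic structure of the sine wave is what keeps this long-horizon forecast stable.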
preds_moving = movingTestWindowPrediction(500, test_data_x)
plt.plot(actual_output_scaled)
plt.plot(preds_moving)
plt.title("Sine Wave prediction using LSTM for 500 steps")
plt.xlabel("Number of steps")
plt.ylabel("Frequency")
plt.show()