import math
import os

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import pandas_datareader as pdr
import tensorflow as tf
from sklearn.metrics import mean_squared_error
from sklearn.preprocessing import MinMaxScaler
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import LSTM
from tensorflow.keras.models import Sequential
# --- Data acquisition & scaling -------------------------------------------
# Tiingo API key: prefer the TIINGO_API_KEY environment variable so the
# secret stays out of source control; the hard-coded value is kept only as
# a backward-compatible fallback.
# NOTE(review): this key has been exposed in version control -- rotate it.
key = os.getenv('TIINGO_API_KEY', '43210fb49481145203f782ffa978dcaeb618e051')
# Download AAPL daily data from Tiingo, cache it to disk, then re-read the
# cache (the CSV round-trip flattens the (symbol, date) MultiIndex into
# ordinary columns).
df = pdr.get_data_tiingo('AAPL', api_key=key)
df.to_csv('AAPL.csv')
df = pd.read_csv('AAPL.csv')
# Model a single series: the dividend/split-adjusted close.
df1 = df.reset_index()['adjClose']
# LSTMs train best on bounded inputs: min-max scale to [0, 1].
# reshape(-1, 1) because sklearn scalers require a 2-D (n_samples, 1) array.
scaler = MinMaxScaler(feature_range=(0, 1))
df1 = scaler.fit_transform(np.array(df1).reshape(-1, 1))
print(df1)
# Output: [[0.00148542] [0.00393385] [0.00514872] ... [0.96662347] [0.97190609] [0.97366696]]
# --- Chronological train/test split (67% / 33%) ---------------------------
# Time-series data must not be shuffled: earlier rows train, later rows test.
training_size = int(len(df1) * 0.67)
test_size = len(df1) - training_size
train_data = df1[:training_size, :]
test_data = df1[training_size:, :1]
import numpy
def create_dataset(dataset, time_step=1):
    """Slice a (n, 1) scaled series into supervised-learning pairs.

    Each sample is a window of ``time_step`` consecutive values taken from
    column 0; its label is the value immediately after the window.

    Note: the loop bound ``len(dataset) - time_step - 1`` deliberately
    preserves the original off-by-one -- the final valid window is skipped,
    which the downstream plotting offsets (the ``+1`` shift) rely on.

    Returns ``(X, y)`` as numpy arrays of shapes
    ``(n - time_step - 1, time_step)`` and ``(n - time_step - 1,)``.
    """
    windows, labels = [], []
    for start in range(len(dataset) - time_step - 1):
        windows.append(dataset[start:start + time_step, 0])
        labels.append(dataset[start + time_step, 0])
    return numpy.array(windows), numpy.array(labels)
# Window length: each training sample looks back 100 timesteps.
time_step = 100
X_train, y_train = create_dataset(train_data, time_step)
X_test, ytest = create_dataset(test_data, time_step)
# Keras LSTM layers expect 3-D input: (samples, timesteps, features=1).
X_train = X_train.reshape(*X_train.shape, 1)
X_test = X_test.reshape(*X_test.shape, 1)
# --- Stacked LSTM regressor -----------------------------------------------
# Three 50-unit LSTM layers: the first two return full sequences so the
# next LSTM receives one vector per timestep; the last emits a single
# summary vector, fed to a 1-unit linear head (next-step scaled price).
model = Sequential([
    LSTM(50, return_sequences=True, input_shape=(100, 1)),
    LSTM(50, return_sequences=True),
    LSTM(50),
    Dense(1),
])
model.compile(loss='mean_squared_error', optimizer='adam')
model.summary()
# Output: Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= lstm (LSTM) (None, 100, 50) 10400 _________________________________________________________________ lstm_1 (LSTM) (None, 100, 50) 20200 _________________________________________________________________ lstm_2 (LSTM) (None, 50) 20200 _________________________________________________________________ dense (Dense) (None, 1) 51 ================================================================= Total params: 50,851 Trainable params: 50,851 Non-trainable params: 0 _________________________________________________________________
# Train for 100 epochs, tracking loss on the held-out tail of the series.
# (Using the test set as validation_data means it is NOT unseen data --
# standard caveat for this tutorial-style setup.)
history = model.fit(
    X_train, y_train,
    validation_data=(X_test, ytest),
    epochs=100, batch_size=64, verbose=1,
)
# Output: Epoch 1/100 12/12 [==============================] - 9s 285ms/step - loss: 0.0063 - val_loss: 0.1562 Epoch 2/100 12/12 [==============================] - 2s 187ms/step - loss: 0.0016 - val_loss: 0.0493 Epoch 3/100 12/12 [==============================] - 2s 185ms/step - loss: 7.5204e-04 - val_loss: 0.0142 Epoch 4/100 12/12 [==============================] - 2s 182ms/step - loss: 3.2980e-04 - val_loss: 0.0025 Epoch 5/100 ... (training log abridged below, epochs 5-99 followed the same pattern of slowly decreasing loss) ...
# Output: Epoch 100/100 12/12 [==============================] - 2s 188ms/step - loss: 9.2531e-05 - val_loss: 0.0012
# Output: <tensorflow.python.keras.callbacks.History at 0x7fe41f21b2d0>
# Predict on both splits, then map the outputs from the scaler's [0, 1]
# range back to the original price scale.
train_predict = scaler.inverse_transform(model.predict(X_train))
test_predict = scaler.inverse_transform(model.predict(X_test))
# Train RMSE on the original price scale. BUG FIX: y_train is still in the
# scaler's [0, 1] range while train_predict was inverse-transformed above;
# comparing them directly mixes units and yields a meaningless number
# (the previously reported 44.16). Un-scale the targets first.
print(math.sqrt(mean_squared_error(
    scaler.inverse_transform(y_train.reshape(-1, 1)), train_predict)))
# Output: 44.1638198226284  (computed with mismatched scales -- see fix above)
# Test RMSE on the original price scale. BUG FIX: ytest is still scaled to
# [0, 1] while test_predict is in dollars; inverse-transform the targets so
# both operands share units (the previously reported 119.19 was meaningless).
print(math.sqrt(mean_squared_error(
    scaler.inverse_transform(ytest.reshape(-1, 1)), test_predict)))
# Output: 119.18831658437351  (computed with mismatched scales -- see fix above)
look_back = 100
# Align train predictions with their position in the full series: the
# first prediction corresponds to index `look_back`, since one full
# window is consumed before the first label exists.
trainPredictPlot = numpy.full_like(df1, numpy.nan)
trainPredictPlot[look_back:look_back + len(train_predict), :] = train_predict
# shift test predictions for plotting
# Test predictions start after the training span plus a second window;
# the +1 matches the sample dropped by create_dataset's loop bound.
testPredictPlot = numpy.full_like(df1, numpy.nan)
testPredictPlot[len(train_predict) + look_back * 2 + 1:len(df1) - 1, :] = test_predict
# plot baseline and predictions
plt.plot(scaler.inverse_transform(df1))
plt.plot(trainPredictPlot)
plt.plot(testPredictPlot)
plt.show()
# Final test-set loss in the scaled domain (MSE -- the compiled loss, not
# a classification accuracy despite the original variable name).
accuracy = model.evaluate(X_test, ytest)
print(accuracy)
# Output: 10/10 [==============================] - 0s 34ms/step - loss: 0.0012 0.0012427340261638165