# Copyright 2023 Shane Khalid. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# --- Imports and environment setup -------------------------------------------
import yfinance as yf
import tensorflow as tf
from tensorflow import keras
# BUG FIX: SimpleRNN and GRU were each listed twice on this line; duplicates removed.
from keras.layers import GRU, Dropout, SimpleRNN, LSTM, Dense
from keras.models import Sequential
from sklearn.preprocessing import MinMaxScaler

# Report whether TensorFlow can see a GPU.
print("Num GPUs Available: ", len(tf.config.experimental.list_physical_devices('GPU')))

# Select the PyTorch device (CUDA if available, otherwise CPU).
import torch
if torch.cuda.is_available():
    device = torch.device("cuda")
    print("GPU is available and being used.")
else:
    device = torch.device("cpu")
    print("GPU is not available, using CPU instead.")

import pandas as pd
import numpy as np
import plotly.express as px
import statsmodels.api as sm
import matplotlib.pyplot as plt
import plotly.graph_objects as go
# BUG FIX: duplicate `import matplotlib.pyplot as plt` removed.
import matplotlib.dates as dates
import seaborn as sns
import math
import datetime
import keras  # NOTE(review): rebinds `keras`, shadowing `from tensorflow import keras` above
import warnings
# NOTE(review): silencing *all* warnings hides real problems (e.g. pandas
# SettingWithCopyWarning when assigning columns to DataFrame slices below).
warnings.filterwarnings('ignore')
from datetime import date, timedelta
# BUG FIX: duplicate `from keras.models import Sequential` removed (imported above).
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import Dropout
from keras.layers import *  # NOTE(review): wildcard kept for safety; explicit imports cover all visible usage
from keras.callbacks import EarlyStopping
from keras.metrics import Accuracy
from keras.metrics import F1Score
from keras.metrics import Precision
# BUG FIX: duplicate `from sklearn.preprocessing import MinMaxScaler` removed (imported above).
from sklearn.metrics import mean_squared_error
from sklearn.metrics import mean_absolute_error
#from sklearn.metrics import accuracy_score
#from sklearn.metrics import precision_score
#from sklearn.metrics import f1_score
%matplotlib inline
2023-10-30 17:10:02.777910: E tensorflow/compiler/xla/stream_executor/cuda/cuda_dnn.cc:9342] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered 2023-10-30 17:10:02.778003: E tensorflow/compiler/xla/stream_executor/cuda/cuda_fft.cc:609] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered 2023-10-30 17:10:02.778026: E tensorflow/compiler/xla/stream_executor/cuda/cuda_blas.cc:1518] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered 2023-10-30 17:10:02.785240: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. 2023-10-30 17:10:04.468403: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node Your kernel may have been built without NUMA support. 2023-10-30 17:10:04.474358: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node Your kernel may have been built without NUMA support. 2023-10-30 17:10:04.474418: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node Your kernel may have been built without NUMA support.
Num GPUs Available: 1 GPU is available and being used.
# Load the pre-trained LSTM model from disk (saved in Module 3; not retrained here).
LSTM_model = tf.keras.models.load_model('LSTM_model-Module3.keras')
# Show the model architecture
LSTM_model.summary()
2023-10-30 17:15:46.030479: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node Your kernel may have been built without NUMA support. 2023-10-30 17:15:46.030618: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node Your kernel may have been built without NUMA support. 2023-10-30 17:15:46.030653: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node Your kernel may have been built without NUMA support. 2023-10-30 17:15:48.092769: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node Your kernel may have been built without NUMA support. 2023-10-30 17:15:48.092830: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node Your kernel may have been built without NUMA support. 2023-10-30 17:15:48.092838: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1977] Could not identify NUMA node of platform GPU id 0, defaulting to 0. Your kernel may not have been built with NUMA support. 2023-10-30 17:15:48.092875: I tensorflow/compiler/xla/stream_executor/cuda/cuda_gpu_executor.cc:880] could not open file to read NUMA node: /sys/bus/pci/devices/0000:43:00.0/numa_node Your kernel may have been built without NUMA support. 
2023-10-30 17:15:48.092895: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1886] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 20946 MB memory: -> device: 0, name: NVIDIA GeForce RTX 3090 Ti, pci bus id: 0000:43:00.0, compute capability: 8.6 2023-10-30 17:18:07.718901: I tensorflow/tsl/platform/default/subprocess.cc:304] Start cannot spawn child process: No such file or directory
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= lstm (LSTM) (None, 165, 50) 10400 dropout (Dropout) (None, 165, 50) 0 lstm_1 (LSTM) (None, 165, 50) 20200 dropout_1 (Dropout) (None, 165, 50) 0 lstm_2 (LSTM) (None, 165, 50) 20200 dropout_2 (Dropout) (None, 165, 50) 0 lstm_3 (LSTM) (None, 50) 20200 dropout_3 (Dropout) (None, 50) 0 dense (Dense) (None, 1) 51 ================================================================= Total params: 71051 (277.54 KB) Trainable params: 71051 (277.54 KB) Non-trainable params: 0 (0.00 Byte) _________________________________________________________________
# Download daily GOOGL OHLCV data from Yahoo Finance.
# NOTE: yfinance's `end` is exclusive, so the last row is 2023-10-25.
googl_df = yf.download("GOOGL", start="2010-01-01", end="2023-10-26")
# Display the DataFrame (notebook cell output)
googl_df
[*********************100%%**********************] 1 of 1 completed
Open | High | Low | Close | Adj Close | Volume | |
---|---|---|---|---|---|---|
Date | ||||||
2010-01-04 | 15.689439 | 15.753504 | 15.621622 | 15.684434 | 15.684434 | 78169752 |
2010-01-05 | 15.695195 | 15.711712 | 15.554054 | 15.615365 | 15.615365 | 120067812 |
2010-01-06 | 15.662162 | 15.662162 | 15.174174 | 15.221722 | 15.221722 | 158988852 |
2010-01-07 | 15.250250 | 15.265265 | 14.831081 | 14.867367 | 14.867367 | 256315428 |
2010-01-08 | 14.814815 | 15.096346 | 14.742492 | 15.065566 | 15.065566 | 188783028 |
... | ... | ... | ... | ... | ... | ... |
2023-10-19 | 138.500000 | 139.660004 | 137.380005 | 137.750000 | 137.750000 | 26066000 |
2023-10-20 | 137.330002 | 137.869995 | 135.080002 | 135.600006 | 135.600006 | 26315200 |
2023-10-23 | 135.039993 | 137.660004 | 133.949997 | 136.500000 | 136.500000 | 26317900 |
2023-10-24 | 137.830002 | 139.360001 | 137.419998 | 138.809998 | 138.809998 | 44814300 |
2023-10-25 | 128.160004 | 128.309998 | 125.070000 | 125.610001 | 125.610001 | 84366200 |
3477 rows × 6 columns
# Plot Adjusted Close price
# NOTE(review): sns.set(...) runs *after* .plot(); consider calling it before
# plotting so the figsize reliably applies to this figure — TODO confirm.
googl_df['Adj Close'].plot(grid = True)
sns.set(rc={'figure.figsize':(16, 8)})
plt.title('GOOGL Adjusted Close Price', color = 'black', fontsize = 20)
plt.xlabel('Year', color = 'black', fontsize = 15)
plt.ylabel('Stock price', color = 'black', fontsize = 15);
# create dataframe for adjusted closing price
# (double brackets keep the result a one-column DataFrame rather than a Series)
googl_adj = googl_df[['Adj Close']]
# Display the DataFrame (notebook cell output)
googl_adj
Adj Close | |
---|---|
Date | |
2010-01-04 | 15.684434 |
2010-01-05 | 15.615365 |
2010-01-06 | 15.221722 |
2010-01-07 | 14.867367 |
2010-01-08 | 15.065566 |
... | ... |
2023-10-19 | 137.750000 |
2023-10-20 | 135.600006 |
2023-10-23 | 136.500000 |
2023-10-24 | 138.809998 |
2023-10-25 | 125.610001 |
3477 rows × 1 columns
# Convert DataFrame to numpy array
googl_adj_arr = googl_adj.values
# Find number of rows to train model on (80% of data set)
# Chronological split (no shuffle), as appropriate for time-series forecasting.
training_data_len = int(0.8*len(googl_adj))
training_data_len
2781
# Create train data set (first 80% of rows)
train = googl_adj_arr[0:training_data_len, :]
# Normalize the data to [0, 1].
# NOTE: the scaler is fit on the *training* slice only (avoids test-set
# leakage); the same fitted scaler is reused to transform test data later.
scaler = MinMaxScaler(feature_range=(0,1))
train_scaled = scaler.fit_transform(train)
train_scaled
array([[0.05742836], [0.05659711], [0.05185961], ..., [0.90826299], [0.94247261], [1. ]])
train_scaled.shape
(2781, 1)
# Creating a data structure with 165 time-steps and 1 output
# (the original "60 time-steps" note was stale — the look-back window is 165)
# Split data into X_train and y_train data sets
X_train = []
y_train = []
# Each sample: the previous 165 scaled prices; target: the next price.
for i in range(165, len(train_scaled)):
    X_train.append(train_scaled[i-165:i, 0])
    # NOTE(review): this appends a length-1 array (slice i:i+1), not a scalar,
    # so np.array(y_train) later has shape (n, 1) — matches the model's single output.
    y_train.append(train_scaled[i:i+1, 0])
    if i <= 166: # debug: print the accumulated lists for the first two passes
        print(X_train)
        print(y_train)
        print()
[array([0.05742836, 0.05659711, 0.05185961, 0.04759494, 0.04998027, 0.0497062 , 0.04650469, 0.0454837 , 0.04631495, 0.04334836, 0.04564332, 0.04347183, 0.04424586, 0.03431607, 0.03130129, 0.03203013, 0.03193375, 0.02958157, 0.02827145, 0.02919907, 0.02862684, 0.03154825, 0.02731973, 0.02867804, 0.0293346 , 0.03022909, 0.02962976, 0.03021704, 0.02922919, 0.03169282, 0.03076218, 0.03227108, 0.03153018, 0.03214458, 0.02981648, 0.02873224, 0.02721432, 0.02732575, 0.02909969, 0.03162053, 0.03290356, 0.03569545, 0.03859276, 0.03807173, 0.03738205, 0.04227918, 0.0436917 , 0.04320981, 0.03828256, 0.03889094, 0.03899937, 0.03925235, 0.03732482, 0.03657188, 0.03401188, 0.03652067, 0.03819221, 0.03813498, 0.0380627 , 0.03934572, 0.0394692 , 0.03997517, 0.04064078, 0.03980049, 0.03839099, 0.03958063, 0.03919814, 0.04115881, 0.04538732, 0.04605895, 0.04795635, 0.03435824, 0.03434317, 0.03583098, 0.03560811, 0.03342759, 0.03280415, 0.02878345, 0.02800642, 0.02804557, 0.02889186, 0.02699446, 0.02847023, 0.02117271, 0.0221937 , 0.01885365, 0.01718815, 0.02577469, 0.02197987, 0.02087756, 0.02253103, 0.02152207, 0.02165459, 0.0187633 , 0.01757666, 0.01172781, 0.01083632, 0.01237534, 0.01234824, 0.01186636, 0.016381 , 0.0149263 , 0.01394446, 0.01725742, 0.02094081, 0.01886871, 0.01489318, 0.01467031, 0.01142964, 0.01534193, 0.01579069, 0.01419144, 0.01864884, 0.01963671, 0.01927832, 0.01926325, 0.01580875, 0.01511303, 0.01384809, 0.01175491, 0.01102607, 0.01084537, 0.0054784 , 0.00267444, 0.00103001, 0.00014456, 0. 
, 0.00425561, 0.00617109, 0.00946296, 0.01197478, 0.0160015 , 0.01664603, 0.01745318, 0.00708969, 0.00906842, 0.01370955, 0.01247773, 0.01467934, 0.01626053, 0.01593224, 0.01703455, 0.0145408 , 0.01473355, 0.01469138, 0.01636594, 0.01619125, 0.02115766, 0.02169374, 0.01932047, 0.0208655 , 0.02037158, 0.0167665 , 0.01684782, 0.01514316, 0.01491426, 0.01639906, 0.01387821, 0.00960753, 0.00781553, 0.00843294, 0.00461402, 0.00558682, 0.00449053, 0.00685477])] [array([0.00500555])] [array([0.05742836, 0.05659711, 0.05185961, 0.04759494, 0.04998027, 0.0497062 , 0.04650469, 0.0454837 , 0.04631495, 0.04334836, 0.04564332, 0.04347183, 0.04424586, 0.03431607, 0.03130129, 0.03203013, 0.03193375, 0.02958157, 0.02827145, 0.02919907, 0.02862684, 0.03154825, 0.02731973, 0.02867804, 0.0293346 , 0.03022909, 0.02962976, 0.03021704, 0.02922919, 0.03169282, 0.03076218, 0.03227108, 0.03153018, 0.03214458, 0.02981648, 0.02873224, 0.02721432, 0.02732575, 0.02909969, 0.03162053, 0.03290356, 0.03569545, 0.03859276, 0.03807173, 0.03738205, 0.04227918, 0.0436917 , 0.04320981, 0.03828256, 0.03889094, 0.03899937, 0.03925235, 0.03732482, 0.03657188, 0.03401188, 0.03652067, 0.03819221, 0.03813498, 0.0380627 , 0.03934572, 0.0394692 , 0.03997517, 0.04064078, 0.03980049, 0.03839099, 0.03958063, 0.03919814, 0.04115881, 0.04538732, 0.04605895, 0.04795635, 0.03435824, 0.03434317, 0.03583098, 0.03560811, 0.03342759, 0.03280415, 0.02878345, 0.02800642, 0.02804557, 0.02889186, 0.02699446, 0.02847023, 0.02117271, 0.0221937 , 0.01885365, 0.01718815, 0.02577469, 0.02197987, 0.02087756, 0.02253103, 0.02152207, 0.02165459, 0.0187633 , 0.01757666, 0.01172781, 0.01083632, 0.01237534, 0.01234824, 0.01186636, 0.016381 , 0.0149263 , 0.01394446, 0.01725742, 0.02094081, 0.01886871, 0.01489318, 0.01467031, 0.01142964, 0.01534193, 0.01579069, 0.01419144, 0.01864884, 0.01963671, 0.01927832, 0.01926325, 0.01580875, 0.01511303, 0.01384809, 0.01175491, 0.01102607, 0.01084537, 0.0054784 , 0.00267444, 0.00103001, 
0.00014456, 0. , 0.00425561, 0.00617109, 0.00946296, 0.01197478, 0.0160015 , 0.01664603, 0.01745318, 0.00708969, 0.00906842, 0.01370955, 0.01247773, 0.01467934, 0.01626053, 0.01593224, 0.01703455, 0.0145408 , 0.01473355, 0.01469138, 0.01636594, 0.01619125, 0.02115766, 0.02169374, 0.01932047, 0.0208655 , 0.02037158, 0.0167665 , 0.01684782, 0.01514316, 0.01491426, 0.01639906, 0.01387821, 0.00960753, 0.00781553, 0.00843294, 0.00461402, 0.00558682, 0.00449053, 0.00685477]), array([0.05659711, 0.05185961, 0.04759494, 0.04998027, 0.0497062 , 0.04650469, 0.0454837 , 0.04631495, 0.04334836, 0.04564332, 0.04347183, 0.04424586, 0.03431607, 0.03130129, 0.03203013, 0.03193375, 0.02958157, 0.02827145, 0.02919907, 0.02862684, 0.03154825, 0.02731973, 0.02867804, 0.0293346 , 0.03022909, 0.02962976, 0.03021704, 0.02922919, 0.03169282, 0.03076218, 0.03227108, 0.03153018, 0.03214458, 0.02981648, 0.02873224, 0.02721432, 0.02732575, 0.02909969, 0.03162053, 0.03290356, 0.03569545, 0.03859276, 0.03807173, 0.03738205, 0.04227918, 0.0436917 , 0.04320981, 0.03828256, 0.03889094, 0.03899937, 0.03925235, 0.03732482, 0.03657188, 0.03401188, 0.03652067, 0.03819221, 0.03813498, 0.0380627 , 0.03934572, 0.0394692 , 0.03997517, 0.04064078, 0.03980049, 0.03839099, 0.03958063, 0.03919814, 0.04115881, 0.04538732, 0.04605895, 0.04795635, 0.03435824, 0.03434317, 0.03583098, 0.03560811, 0.03342759, 0.03280415, 0.02878345, 0.02800642, 0.02804557, 0.02889186, 0.02699446, 0.02847023, 0.02117271, 0.0221937 , 0.01885365, 0.01718815, 0.02577469, 0.02197987, 0.02087756, 0.02253103, 0.02152207, 0.02165459, 0.0187633 , 0.01757666, 0.01172781, 0.01083632, 0.01237534, 0.01234824, 0.01186636, 0.016381 , 0.0149263 , 0.01394446, 0.01725742, 0.02094081, 0.01886871, 0.01489318, 0.01467031, 0.01142964, 0.01534193, 0.01579069, 0.01419144, 0.01864884, 0.01963671, 0.01927832, 0.01926325, 0.01580875, 0.01511303, 0.01384809, 0.01175491, 0.01102607, 0.01084537, 0.0054784 , 0.00267444, 0.00103001, 0.00014456, 0. 
, 0.00425561, 0.00617109, 0.00946296, 0.01197478, 0.0160015 , 0.01664603, 0.01745318, 0.00708969, 0.00906842, 0.01370955, 0.01247773, 0.01467934, 0.01626053, 0.01593224, 0.01703455, 0.0145408 , 0.01473355, 0.01469138, 0.01636594, 0.01619125, 0.02115766, 0.02169374, 0.01932047, 0.0208655 , 0.02037158, 0.0167665 , 0.01684782, 0.01514316, 0.01491426, 0.01639906, 0.01387821, 0.00960753, 0.00781553, 0.00843294, 0.00461402, 0.00558682, 0.00449053, 0.00685477, 0.00500555])] [array([0.00500555]), array([0.0042014])]
# Convert X_train and y_train to numpy arrays for the LSTM model.
# BUG FIX: the conversion was performed twice on consecutive identical lines;
# the redundant second conversion is removed. (The stale "GRU model" comment
# is also corrected — the loaded model is an LSTM.)
X_train, y_train = np.array(X_train), np.array(y_train)
# Reshape to 3-D (samples, time steps, features) as the LSTM input expects.
X_train = np.reshape(X_train, (X_train.shape[0], X_train.shape[1], 1))
print(X_train.shape)
(2616, 165, 1)
# NOTE(review): X_train was already reshaped to (samples, 165, 1) above; the
# second, identical reshape was a no-op and has been removed. Only the shape
# display is kept.
X_train.shape
(2616, 165, 1)
# Hold-out test set: the remaining 20% of rows (unscaled prices)
test = googl_adj_arr[training_data_len: , :]
test.shape
(696, 1)
# Test inputs include the final 165 training rows so the first test sample
# has a full look-back window.
inputs = googl_adj_arr[len(googl_adj_arr) - len(test) - 165:]
inputs = inputs.reshape(-1,1)
# Transform with the scaler fitted on the training data (no re-fit).
inputs = scaler.transform(inputs)
# Create data sets X_test and y_test
X_test = []
# y_test holds the *unscaled* actual prices for the test period.
y_test = googl_adj_arr[training_data_len:, :]
# Sliding 165-step windows over the scaled inputs.
for i in range(165,inputs.shape[0]):
    X_test.append(inputs[i-165:i,0])
# Convert list of windows to a numpy array
X_test = np.array(X_test)
# Reshape data into 3D (samples, time steps, features) as the LSTM expects
X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1))
print(X_test.shape)
(696, 165, 1)
# We want predictions to contain same values as y_test data set
LSTM_predictions = LSTM_model.predict(X_test)
# Map scaled predictions back to price units with the fitted scaler.
LSTM_predictions = scaler.inverse_transform(LSTM_predictions)
1/22 [>.............................] - ETA: 37s
2023-10-30 17:19:39.114899: I tensorflow/compiler/xla/stream_executor/cuda/cuda_dnn.cc:442] Loaded cuDNN version 8700
22/22 [==============================] - 2s 29ms/step
LSTM_predictions.shape
(696, 1)
# Root-mean-squared error between predictions and actual prices.
# BUG FIX: the original computed np.sqrt(np.mean(diff)**2), i.e. the absolute
# value of the *mean* error — positive and negative errors cancel out. True
# RMSE squares each error *before* averaging.
rmse = np.sqrt(np.mean((LSTM_predictions - y_test) ** 2))
rmse
5.320489258601747
# Plot predicted vs actual values
train = googl_adj[:training_data_len]
# .copy() so assigning a new column does not trigger SettingWithCopyWarning
# (and is guaranteed not to write into googl_adj).
test = googl_adj[training_data_len:].copy()
test['LSTM_Predictions'] = LSTM_predictions
plt.figure(figsize=(20,10))
sns.set_style("whitegrid")
plt.plot(train['Adj Close'], label='Training')
plt.plot(test['Adj Close'], label='Actual')
# BUG FIX: the column is created as 'LSTM_Predictions' (capital P) but was
# plotted as 'LSTM_predictions', raising KeyError.
plt.plot(test['LSTM_Predictions'], label='Predicted')
plt.title("GOOGL Close Price - LSTM", color = 'black', fontsize = 20)
plt.xlabel('Year', color = 'black', fontsize = 15)
plt.ylabel('Stock Price', color = 'black', fontsize = 15)
plt.legend();
--------------------------------------------------------------------------- KeyError Traceback (most recent call last) File ~/.local/lib/python3.10/site-packages/pandas/core/indexes/base.py:3790, in Index.get_loc(self, key) 3789 try: -> 3790 return self._engine.get_loc(casted_key) 3791 except KeyError as err: File index.pyx:152, in pandas._libs.index.IndexEngine.get_loc() File index.pyx:181, in pandas._libs.index.IndexEngine.get_loc() File pandas/_libs/hashtable_class_helper.pxi:7080, in pandas._libs.hashtable.PyObjectHashTable.get_item() File pandas/_libs/hashtable_class_helper.pxi:7088, in pandas._libs.hashtable.PyObjectHashTable.get_item() KeyError: 'LSTM_predictions' The above exception was the direct cause of the following exception: KeyError Traceback (most recent call last) Cell In[25], line 10 8 plt.plot(train['Adj Close'], label='Training') 9 plt.plot(test['Adj Close'], label='Actual') ---> 10 plt.plot(test['LSTM_predictions'], label='Predicted') 11 plt.title("GOOGL Close Price - LSTM", color = 'black', fontsize = 20) 12 plt.xlabel('Year', color = 'black', fontsize = 15) File ~/.local/lib/python3.10/site-packages/pandas/core/frame.py:3896, in DataFrame.__getitem__(self, key) 3894 if self.columns.nlevels > 1: 3895 return self._getitem_multilevel(key) -> 3896 indexer = self.columns.get_loc(key) 3897 if is_integer(indexer): 3898 indexer = [indexer] File ~/.local/lib/python3.10/site-packages/pandas/core/indexes/base.py:3797, in Index.get_loc(self, key) 3792 if isinstance(casted_key, slice) or ( 3793 isinstance(casted_key, abc.Iterable) 3794 and any(isinstance(x, slice) for x in casted_key) 3795 ): 3796 raise InvalidIndexError(key) -> 3797 raise KeyError(key) from err 3798 except TypeError: 3799 # If we have a listlike key, _check_indexing_error will raise 3800 # InvalidIndexError. Otherwise we fall through and re-raise 3801 # the TypeError. 3802 self._check_indexing_error(key) KeyError: 'LSTM_predictions'
test
# Predict Adjusted Close price for GOOGL stock for October 27th 2023
# get the quote (end is exclusive, so data runs through 2023-10-26)
GOOGL_quote = yf.download("GOOGL", start="2015-07-10", end="2023-10-27")
# Create new df with only the Adjusted Close column
new_df = GOOGL_quote.filter(['Adj Close'])
# Get last 165 days closing price and convert df to array
last_165_days = new_df[-165:].values
# Scale data to be values between 0 and 1
# NOTE(review): reuses the scaler fitted on the 2010-2021 training slice;
# recent prices above the training max will scale past 1.0 — TODO confirm intended.
last_165_days_scaled = scaler.transform(last_165_days)
# Create empty list
X_test = []
# Append the single 165-day window
X_test.append(last_165_days_scaled)
# Convert the X_test data set to numpy array
X_test = np.array(X_test)
# Reshape data to (1, 165, 1) for the LSTM
X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1))
# Get predicted (scaled) price
pred_price = LSTM_model.predict(X_test)
# Undo the scaling to get a price in dollars
pred_price = scaler.inverse_transform(pred_price)
print(pred_price)
[*********************100%%**********************] 1 of 1 completed 1/1 [==============================] - 0s 57ms/step [[121.997826]]
# Actual price for 2023-10-26 (yfinance `end` is exclusive, so exactly one row)
GOOGL_quote2 = yf.download("GOOGL", start="2023-10-26", end="2023-10-27")
print(GOOGL_quote2['Adj Close'])
[*********************100%%**********************] 1 of 1 completed Date 2023-10-26 122.279999 Name: Adj Close, dtype: float64
# Relative error between the actual close (122.279999) and the predicted
# close (121.997826).
# BUG FIX: the value was needlessly wrapped in a one-element list; it is now
# a plain float. NOTE: despite the name, this is a *fraction*, not a
# percentage — multiply by 100 for percent (~0.23%).
Percent_Error = (122.279999 - 121.997826) / 122.279999
print(Percent_Error)
[0.0023075973365030878]