In [ ]: Name: Amruta Deokate
Roll No: 2441029
Batch: A
Assignment No: 03
In [ ]: import pandas as pd
import numpy as np
from sklearn.preprocessing import MinMaxScaler
In [ ]: # Load dataset
data = pd.read_csv('google_stock_price.csv')
In [3]: data
Out[3]: Date Open High Low Close Adj Close Volume
0 2004-08-19 2.490664 2.591785 2.390042 2.499133 2.499133 897427216
1 2004-08-20 2.515820 2.716817 2.503118 2.697639 2.697639 458857488
2 2004-08-23 2.758411 2.826406 2.716070 2.724787 2.724787 366857939
3 2004-08-24 2.770615 2.779581 2.579581 2.611960 2.611960 306396159
4 2004-08-25 2.614201 2.689918 2.587302 2.640104 2.640104 184645512
... ... ... ... ... ... ... ...
4837 2023-11-06 130.220001 131.559998 129.929993 131.449997 131.449997 15360400
4838 2023-11-07 131.979996 133.279999 131.139999 132.399994 132.399994 19223800
4839 2023-11-08 132.360001 133.539993 132.160004 133.259995 133.259995 15093600
4840 2023-11-09 133.360001 133.960007 131.509995 131.690002 131.690002 17976500
4841 2023-11-10 131.529999 134.270004 130.869995 134.059998 134.059998 20872900
4842 rows × 7 columns
In [4]: data.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 4842 entries, 0 to 4841
Data columns (total 7 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 Date 4842 non-null object
1 Open 4842 non-null float64
2 High 4842 non-null float64
3 Low 4842 non-null float64
4 Close 4842 non-null float64
5 Adj Close 4842 non-null float64
6 Volume 4842 non-null int64
dtypes: float64(5), int64(1), object(1)
memory usage: 264.9+ KB
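The Date column is read as a plain object. A minimal sketch (assuming the same CSV layout as above) of parsing it as datetime and using it as the index, so that later time plots label the x-axis with actual dates:
In [ ]: # Sketch: parse 'Date' as datetime and set it as the index (assumption: same
# CSV layout as loaded above). With a DatetimeIndex, data['Adj Close'].plot()
# and data['Volume'].plot() show dates on the x-axis automatically.
data['Date'] = pd.to_datetime(data['Date'])
data = data.set_index('Date').sort_index()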
In [5]: # Selecting 'Open' price for prediction
stock_prices = data['Open'].values.reshape(-1, 1)
In [6]: # Normalizing the data
scaler = MinMaxScaler(feature_range=(0, 1))
scaled_data = scaler.fit_transform(stock_prices)
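Fitting the scaler on the entire series lets information from the later validation period leak into training. A hedged sketch of fitting MinMaxScaler on an initial training slice only (the 80/20 split is an assumption chosen to match the validation_split=0.2 used later):
In [ ]: # Sketch (assumption: first 80% of the series is the training portion).
# Fit the scaler on the training slice only, then apply the same transform
# to the full series to avoid look-ahead leakage.
train_size = int(len(stock_prices) * 0.8)
scaler_no_leak = MinMaxScaler(feature_range=(0, 1))
scaler_no_leak.fit(stock_prices[:train_size])
scaled_no_leak = scaler_no_leak.transform(stock_prices)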
In [7]: # Creating time-lagged features
X, y = [], []
for i in range(60, len(scaled_data)):
X.append(scaled_data[i-60:i, 0])
y.append(scaled_data[i, 0])
In [8]: X, y = np.array(X), np.array(y)
X = np.reshape(X, (X.shape[0], X.shape[1], 1))
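For time-series data, a chronological hold-out is usually preferable to a random split. A minimal sketch (the 80/20 ratio is an assumption) of splitting the windowed samples by position:
In [ ]: # Sketch: chronological 80/20 split of the lagged windows (ratio assumed);
# earlier windows train the model, later windows evaluate it.
split = int(len(X) * 0.8)
X_train, X_test = X[:split], X[split:]
y_train, y_test = y[:split], y[split:]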
In [10]: import matplotlib.pyplot as plt
plt.figure(figsize=(5, 5))
plt.subplots_adjust(top=1.25, bottom=1.2)
data['Adj Close'].plot()
plt.ylabel('Adj Close')
plt.xlabel(None)
plt.title(f"Closing Price of Google")
plt.tight_layout()
In [13]: import matplotlib.pyplot as plt
# Plotting the stock prices
plt.figure(figsize=(14, 7))
plt.plot(data['Open'], label='Google Stock Price')
plt.title('Google Stock Price History')
plt.xlabel('Date')
plt.ylabel('Stock Price')
plt.legend()
plt.show()
In [15]: # Now let's plot the total volume of stock being traded each day
plt.figure(figsize=(15, 20))
plt.subplots_adjust(top=1.25, bottom=1.2)
data['Volume'].plot()
plt.ylabel('Volume')
plt.xlabel(None)
plt.title(f"Sales Volume")
Out[15]: Text(0.5, 1.0, 'Sales Volume')
In [17]: from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import SimpleRNN, Dense,Dropout
# Building the RNN model
model = Sequential([
SimpleRNN(units=50, return_sequences=True,
input_shape=(X.shape[1], 1)),
Dropout(0.2),
SimpleRNN(units=50),
Dropout(0.2),
Dense(1) # Output layer for regression
])
# Compile the model
model.compile(optimizer='adam', loss='mean_squared_error')
# Train the model
history = model.fit(X, y, epochs=50, batch_size=32,
validation_split=0.2)
120/120 ━━━━━━━━━━━━━━━━━━━━ 6s 37ms/step - loss: 5.5115e-04 - val_loss:
0.0032
Epoch 10/50
120/120 ━━━━━━━━━━━━━━━━━━━━ 4s 33ms/step - loss: 4.6350e-04 - val_loss:
0.0038
Epoch 11/50
120/120 ━━━━━━━━━━━━━━━━━━━━ 5s 31ms/step - loss: 3.9487e-04 - val_loss:
0.0016
Epoch 12/50
120/120 ━━━━━━━━━━━━━━━━━━━━ 5s 30ms/step - loss: 3.7323e-04 - val_loss:
0.0027
Epoch 13/50
120/120 ━━━━━━━━━━━━━━━━━━━━ 5s 29ms/step - loss: 3.5772e-04 - val_loss:
0.0012
Epoch 14/50
120/120 ━━━━━━━━━━━━━━━━━━━━ 6s 40ms/step - loss: 3.6349e-04 - val_loss:
0.0013
Epoch 15/50
120/120 ━━━━━━━━━━━━━━━━━━━━ 4s 27ms/step - loss: 2.8309e-04 - val_loss:
0.0010
Epoch 16/50
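The History object returned by model.fit records the loss per epoch; a small sketch for visualising training versus validation loss:
In [ ]: # Sketch: plot training and validation loss from the History object above.
plt.figure(figsize=(10, 5))
plt.plot(history.history['loss'], label='Training loss')
plt.plot(history.history['val_loss'], label='Validation loss')
plt.title('RNN Training History')
plt.xlabel('Epoch')
plt.ylabel('MSE Loss')
plt.legend()
plt.show()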
In [19]: # Making predictions
predicted_stock_price = model.predict(X)
predicted_stock_price = scaler.inverse_transform(predicted_stock_price)
150/150 ━━━━━━━━━━━━━━━━━━━━ 2s 9ms/step
In [20]: # Actual stock prices
actual_stock_price = scaler.inverse_transform(y.reshape(-1,1))
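A numeric error measure complements the plot. A hedged sketch computing RMSE and MAE on the rescaled prices (note these are in-sample errors, since the predictions above were made on the full X):
In [ ]: # Sketch: RMSE and MAE between actual and predicted prices (both already
# inverse-transformed back to the original price scale).
from sklearn.metrics import mean_squared_error, mean_absolute_error
rmse = np.sqrt(mean_squared_error(actual_stock_price, predicted_stock_price))
mae = mean_absolute_error(actual_stock_price, predicted_stock_price)
print(f"RMSE: {rmse:.4f}  MAE: {mae:.4f}")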
In [21]: # Plotting the results
plt.figure(figsize=(14, 7))
plt.plot(actual_stock_price, color='blue', label='Actual Google Stock Price')
plt.plot(predicted_stock_price, color='red',
label='Predicted Google Stock Price')
plt.title('Google Stock Price Prediction')
plt.xlabel('Time')
plt.ylabel('Google Stock Price')
plt.legend()
plt.show()
Using LSTM
In [22]: from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
In [24]: regressor = Sequential()
regressor.add(LSTM(units=4, activation='sigmoid', input_shape=(None, 1)))
regressor.add(Dense(units=1))
regressor.compile(optimizer='adam', loss='mean_squared_error')
regressor.fit(X, y, batch_size=32, epochs=200)
150/150 ━━━━━━━━━━━━━━━━━━━━ 4s 14ms/step - loss: 0.0077
Epoch 9/200
150/150 ━━━━━━━━━━━━━━━━━━━━ 3s 15ms/step - loss: 0.0030
Epoch 10/200
150/150 ━━━━━━━━━━━━━━━━━━━━ 2s 14ms/step - loss: 0.0012
Epoch 11/200
150/150 ━━━━━━━━━━━━━━━━━━━━ 3s 21ms/step - loss: 6.3420e-04
Epoch 12/200
150/150 ━━━━━━━━━━━━━━━━━━━━ 2s 15ms/step - loss: 4.5504e-04
Epoch 13/200
150/150 ━━━━━━━━━━━━━━━━━━━━ 2s 15ms/step - loss: 3.5364e-04
Epoch 14/200
150/150 ━━━━━━━━━━━━━━━━━━━━ 3s 15ms/step - loss: 3.1728e-04
Epoch 15/200
150/150 ━━━━━━━━━━━━━━━━━━━━ 3s 14ms/step - loss: 2.7323e-04
Epoch 16/200
150/150 ━━━━━━━━━━━━━━━━━━━━ 3s 21ms/step - loss: 2.4593e-04
Epoch 17/200
150/150 ━━━━━━━━━━━━━━━━━━━━ 4s 15ms/step - loss: 2.1887e-04
Epoch 18/200
150/150 ━━━━━━━━━━━━━━━━━━━━ 2s 15ms/step - loss: 2.1428e-04
In [25]: # Actual stock prices
actual_stock_price = scaler.inverse_transform(y.reshape(-1,1))
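At this point predicted_stock_price still holds the SimpleRNN output. A minimal sketch for regenerating it from the LSTM regressor, so that the plot below reflects the LSTM model:
In [ ]: # Sketch: predict with the trained LSTM regressor and rescale, so the
# following comparison plot shows the LSTM (not the earlier SimpleRNN).
predicted_stock_price = scaler.inverse_transform(regressor.predict(X))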
In [28]: plt.plot(actual_stock_price, color='red', label='Real Google Stock Price')
plt.plot(predicted_stock_price, color='blue', label='Predicted Google Stock Price')
plt.title('Google Stock Price Prediction')
plt.xlabel('Time')
plt.ylabel('Google Stock Price')
plt.legend()
plt.show()
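To forecast the next trading day, the most recent 60 scaled prices can be fed through a trained model. A hedged sketch using the LSTM regressor (the SimpleRNN model would be used the same way):
In [ ]: # Sketch: one-step-ahead forecast from the last 60-day window of scaled prices.
last_window = scaled_data[-60:].reshape(1, 60, 1)
next_scaled = regressor.predict(last_window)
next_price = scaler.inverse_transform(next_scaled)
print("Predicted next-day opening price:", float(next_price[0, 0]))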
In [ ]: