# Name: Muhammad Hermen Khan
# Enroll: 01-136221-054
# Class: BS-AI-6A
# Dataset link: https://www.kaggle.com/datasets/ysthehurricane/tesla-stock-data-20162021
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from keras.layers import LSTM, SimpleRNN, Dense, Input
from keras.models import Sequential
from sklearn.metrics import mean_squared_error
from sklearn.preprocessing import MinMaxScaler
# Load the dataset
file_path = '/content/TSLA.csv'  # Path to the uploaded dataset
df = pd.read_csv(file_path)

# Preprocessing
df['Date'] = pd.to_datetime(df['Date'])  # ensure Date column is in datetime format
df.set_index('Date', inplace=True)  # set Date as the index
data = df['Close'].values.reshape(-1, 1)  # use the Close column for predictions

# Normalize the data to the [0, 1] range.
# NOTE(review): the scaler is fit on the FULL series before the train/test
# split below, which leaks test-set statistics (the later, higher prices)
# into the training data. For a leakage-free evaluation, fit the scaler on
# the training portion only and transform the test portion with it.
scaler = MinMaxScaler(feature_range=(0, 1))
scaled_data = scaler.fit_transform(data)
# Create sequences
def create_sequences(data, seq_length):
    """Build supervised-learning (window, next-value) pairs from a series.

    Parameters
    ----------
    data : array-like of shape (n, 1)
        The (scaled) series values.
    seq_length : int
        Number of past steps in each input window.

    Returns
    -------
    X : np.ndarray of shape (n - seq_length, seq_length, 1)
        Sliding windows of ``seq_length`` consecutive values.
    y : np.ndarray of shape (n - seq_length, 1)
        The value immediately following each window.
    """
    X, y = [], []
    for i in range(len(data) - seq_length):
        X.append(data[i:i + seq_length])  # window of seq_length past values
        y.append(data[i + seq_length])    # the next value to predict
    return np.array(X), np.array(y)
seq_length = 30  # look-back window length, in trading days
X, y = create_sequences(scaled_data, seq_length)

# Chronological 80/20 train/test split — no shuffling, so the test
# set is strictly later in time than the training set.
train_size = int(len(X) * 0.8)
X_train, y_train = X[:train_size], y[:train_size]
X_test, y_test = X[train_size:], y[train_size:]
# Build the LSTM model: one 50-unit LSTM layer feeding a single dense output.
# An explicit Input layer replaces the input_shape= argument on the LSTM
# layer, which recent Keras versions deprecate with a UserWarning
# ("Do not pass an `input_shape`/`input_dim` argument to a layer").
lstm_model = Sequential([
    Input(shape=(seq_length, 1)),
    LSTM(50, activation='relu'),
    Dense(1)
])
lstm_model.compile(optimizer='adam', loss='mean_squared_error')

# Train the LSTM model (last 20% of the training windows held out as validation)
print("Training LSTM model...")
lstm_model.fit(X_train, y_train, epochs=20, batch_size=32, verbose=1,
               validation_split=0.2)
# Build the RNN model: identical topology to the LSTM model above but with a
# SimpleRNN cell, so the two architectures are compared on equal footing.
# The explicit Input layer avoids the Keras input_shape deprecation warning.
rnn_model = Sequential([
    Input(shape=(seq_length, 1)),
    SimpleRNN(50, activation='relu'),
    Dense(1)
])
rnn_model.compile(optimizer='adam', loss='mean_squared_error')

# Train the RNN model with the same schedule as the LSTM for a fair comparison
print("Training RNN model...")
rnn_model.fit(X_train, y_train, epochs=20, batch_size=32, verbose=1,
              validation_split=0.2)
# Evaluate both models on the held-out test windows.
print("Making predictions...")
lstm_preds = lstm_model.predict(X_test)
rnn_preds = rnn_model.predict(X_test)

# Undo the min-max scaling so errors are expressed in original price units.
lstm_preds = scaler.inverse_transform(lstm_preds)
rnn_preds = scaler.inverse_transform(rnn_preds)
y_test_original = scaler.inverse_transform(y_test)


def _rmse(actual, predicted):
    # Root-mean-squared error between actual and predicted prices.
    return np.sqrt(mean_squared_error(actual, predicted))


lstm_rmse = _rmse(y_test_original, lstm_preds)
rnn_rmse = _rmse(y_test_original, rnn_preds)
print(f"LSTM RMSE: {lstm_rmse}")
print(f"RNN RMSE: {rnn_rmse}")
# Visualize both models' test-set predictions against the actual prices.
plt.figure(figsize=(12, 6))
plt.plot(y_test_original, color='black', label='Actual Data')
plt.plot(lstm_preds, color='blue', label='LSTM Predictions')
plt.plot(rnn_preds, color='red', label='RNN Predictions')
plt.title('Model Predictions vs Actual Data')
plt.legend()
plt.show()
# ---------------------------------------------------------------------------
# Sample run output (console transcript, kept for reference — not code):
# ---------------------------------------------------------------------------
_SAMPLE_OUTPUT = """
/usr/local/lib/python3.10/dist-packages/keras/src/layers/rnn/
rnn.py:204: UserWarning: Do not pass an `input_shape`/`input_dim`
argument to a layer. When using Sequential models, prefer using an
`Input(shape)` object as the first layer in the model instead.
super().__init__(**kwargs)
Training LSTM model...
Epoch 1/20
25/25 - 4s 73ms/step - loss: 2.1084e-04 - val_loss: 0.0120
Epoch 2/20
25/25 - 2s 9ms/step - loss: 5.2438e-05 - val_loss: 0.0090
Epoch 3/20
25/25 - 0s 8ms/step - loss: 2.2020e-05 - val_loss: 0.0064
Epoch 4/20
25/25 - 0s 9ms/step - loss: 1.4958e-05 - val_loss: 0.0065
Epoch 5/20
25/25 - 0s 8ms/step - loss: 1.2804e-05 - val_loss: 0.0062
Epoch 6/20
25/25 - 0s 9ms/step - loss: 1.2899e-05 - val_loss: 0.0061
Epoch 7/20
25/25 - 0s 9ms/step - loss: 1.5403e-05 - val_loss: 0.0062
Epoch 8/20
25/25 - 0s 9ms/step - loss: 1.2480e-05 - val_loss: 0.0059
Epoch 9/20
25/25 - 0s 8ms/step - loss: 1.2517e-05 - val_loss: 0.0059
Epoch 10/20
25/25 - 0s 7ms/step - loss: 1.3148e-05 - val_loss: 0.0059
Epoch 11/20
25/25 - 0s 7ms/step - loss: 1.2874e-05 - val_loss: 0.0057
Epoch 12/20
25/25 - 0s 7ms/step - loss: 1.2026e-05 - val_loss: 0.0055
Epoch 13/20
25/25 - 0s 6ms/step - loss: 1.2173e-05 - val_loss: 0.0053
Epoch 14/20
25/25 - 0s 7ms/step - loss: 1.2124e-05 - val_loss: 0.0054
Epoch 15/20
25/25 - 0s 6ms/step - loss: 1.2248e-05 - val_loss: 0.0053
Epoch 16/20
25/25 - 0s 7ms/step - loss: 1.1315e-05 - val_loss: 0.0052
Epoch 17/20
25/25 - 0s 7ms/step - loss: 1.1597e-05 - val_loss: 0.0050
Epoch 18/20
25/25 - 0s 6ms/step - loss: 1.1599e-05 - val_loss: 0.0051
Epoch 19/20
25/25 - 0s 6ms/step - loss: 1.0993e-05 - val_loss: 0.0049
Epoch 20/20
25/25 - 0s 6ms/step - loss: 1.2103e-05 - val_loss: 0.0047
Training RNN model...
Epoch 1/20
25/25 - 3s 53ms/step - loss: 4.5537e-04 - val_loss: 0.0178
Epoch 2/20
25/25 - 0s 6ms/step - loss: 5.9437e-05 - val_loss: 0.0096
Epoch 3/20
25/25 - 0s 6ms/step - loss: 1.2214e-05 - val_loss: 0.0078
Epoch 4/20
25/25 - 0s 5ms/step - loss: 9.9244e-06 - val_loss: 0.0071
Epoch 5/20
25/25 - 0s 5ms/step - loss: 8.2430e-06 - val_loss: 0.0074
Epoch 6/20
25/25 - 0s 7ms/step - loss: 8.1721e-06 - val_loss: 0.0072
Epoch 7/20
25/25 - 0s 6ms/step - loss: 8.4089e-06 - val_loss: 0.0071
Epoch 8/20
25/25 - 0s 6ms/step - loss: 7.1831e-06 - val_loss: 0.0072
Epoch 9/20
25/25 - 0s 6ms/step - loss: 8.3751e-06 - val_loss: 0.0068
Epoch 10/20
25/25 - 0s 7ms/step - loss: 7.0703e-06 - val_loss: 0.0065
Epoch 11/20
25/25 - 0s 6ms/step - loss: 7.0634e-06 - val_loss: 0.0064
Epoch 12/20
25/25 - 0s 6ms/step - loss: 6.9160e-06 - val_loss: 0.0065
Epoch 13/20
25/25 - 0s 6ms/step - loss: 7.4239e-06 - val_loss: 0.0064
Epoch 14/20
25/25 - 0s 7ms/step - loss: 7.1159e-06 - val_loss: 0.0060
Epoch 15/20
25/25 - 0s 6ms/step - loss: 7.5006e-06 - val_loss: 0.0060
Epoch 16/20
25/25 - 0s 6ms/step - loss: 5.6439e-06 - val_loss: 0.0062
Epoch 17/20
25/25 - 0s 6ms/step - loss: 6.1318e-06 - val_loss: 0.0061
Epoch 18/20
25/25 - 0s 8ms/step - loss: 7.5280e-06 - val_loss: 0.0057
Epoch 19/20
25/25 - 0s 7ms/step - loss: 5.8094e-06 - val_loss: 0.0059
Epoch 20/20
25/25 - 0s 7ms/step - loss: 6.3491e-06 - val_loss: 0.0057
Making predictions...
8/8 - 1s 82ms/step
8/8 - 1s 45ms/step
LSTM RMSE: 244.72772221953448
RNN RMSE: 319.78993041363367
"""