In [1]:
# Core data-handling and plotting imports.
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from pandas import read_csv  # NOTE(review): redundant — pd.read_csv is already available via the pandas import above

from tensorflow.keras import models
from tensorflow.keras import layers

# NOTE(review): these duplicate the tensorflow.keras imports above; mixing the
# standalone `keras` package with `tensorflow.keras` can resolve to two
# different Keras installations — prefer one of the two consistently.
from keras.models import Sequential
from keras.layers import Dense

# NOTE(review): keras.wrappers.scikit_learn was removed in Keras >= 2.12
# (the `scikeras` package is the maintained replacement). This import and the
# three sklearn imports below (cross_val_score, KFold, Pipeline) are unused
# in the cells shown.
from keras.wrappers.scikit_learn import KerasRegressor
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.pipeline import Pipeline
from sklearn.model_selection import train_test_split
from matplotlib import pyplot as plt  # NOTE(review): plt already imported on line 2 of this cell
import warnings
warnings.filterwarnings("ignore")  # suppresses ALL warnings — consider filtering specific categories instead
Out[1]:
timestamp MQ136_CO MQ136_H2S MQ136_NH4 MQ2_Alc MQ2_CO MQ2_H2 MQ2_LPG MQ2_Prop MQ3_Alc MQ3_Ben MQ3_CH4 MQ3_CO MQ3_Hex MQ3_LPG
0 1682089525 5318.23 325.00 453.26 52.85 285.72 30.79 16.98 21.65 0.242456 2.039812 402.45 132.78 3071.24 270.88
1 1682089526 15481.35 865.08 910.13 81.53 447.35 44.22 23.85 30.36 0.287120 2.745789 552.55 188.27 4231.60 351.05
2 1682089527 33282.73 1765.11 1498.42 120.35 754.76 61.29 33.14 42.64 0.322711 3.427850 675.21 244.63 5319.82 426.62
3 1682089528 41759.34 2275.62 1799.59 151.25 958.53 72.42 40.87 49.70 0.325958 3.738615 764.57 268.36 5912.26 455.27
4 1682089529 55179.24 3027.17 2194.09 255.54 1788.28 110.05 61.57 76.49 0.372978 4.443688 916.14 342.01 6980.70 528.91
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
1184004 1683306126 263.13 19.69 64.73 28.73 132.58 19.87 10.38 12.98 0.153980 0.905993 178.67 52.92 1286.80 142.55
1184005 1683306127 263.63 19.79 64.08 28.62 131.13 19.24 10.31 13.06 0.153980 0.917940 178.19 53.40 1281.43 141.77
1184006 1683306128 261.62 19.79 64.41 28.30 134.04 19.57 10.36 12.77 0.154207 0.909561 177.94 54.39 1286.80 141.93
1184007 1683306129 262.62 19.93 64.32 28.40 132.00 19.63 10.26 13.06 0.155917 0.893028 179.65 53.08 1269.00 142.55
1184008 1683306130 260.12 19.83 63.92 28.40 134.33 19.63 10.18 12.77 0.155231 0.909561 178.67 54.47 1277.87 143.48

1184009 rows × 15 columns

In [2]:
import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow import keras
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
In [4]:
# Load the sensor readings and keep only the first contiguous slice of rows.
N_ROWS = 31276  # number of rows used for training/evaluation (previously a magic number inline)
data = pd.read_csv('data4.csv')
data = data.iloc[:N_ROWS]  # .iloc makes the positional (not label-based) slice explicit

# Input features: every sensor channel except the timestamp and the target.
X = data.drop(columns=['timestamp', 'MQ136_CO']).values

# Target variable: carbon-monoxide reading from the MQ136 sensor.
y = data['MQ136_CO'].values

# Hold out 20% of the rows for testing; fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Standardize features using statistics fitted on the training split only
# (fitting on the full dataset would leak test information into the scaler).
scaler = StandardScaler()
X_train_scaled = scaler.fit_transform(X_train)
X_test_scaled = scaler.transform(X_test)
In [5]:
# Feed-forward regression network: two ReLU hidden layers feeding a single
# linear output unit that predicts the scalar MQ136_CO concentration.
model = keras.Sequential()
model.add(keras.layers.Dense(64, activation='relu', input_shape=(X_train_scaled.shape[1],)))
model.add(keras.layers.Dense(32, activation='relu'))
model.add(keras.layers.Dense(1))  # no activation: raw linear output for regression
In [6]:
# Adam optimizer with mean-squared-error loss — standard setup for scalar regression.
model.compile(optimizer='adam', loss='mean_squared_error')
In [7]:
# Train for 100 epochs; 10% of the training split is carved off each run
# for validation-loss monitoring (see the epoch log below).
num_epochs = 100
batch_size = 32
history = model.fit(X_train_scaled, y_train, epochs=num_epochs, batch_size=batch_size, validation_split=0.1)
Epoch 1/100
704/704 [==============================] - 1s 1ms/step - loss: 5152857.0000 - val_loss: 1051450.7500
Epoch 2/100
704/704 [==============================] - 1s 1ms/step - loss: 824725.3750 - val_loss: 537839.5000
Epoch 3/100
704/704 [==============================] - 1s 1ms/step - loss: 531959.6875 - val_loss: 376191.7812
Epoch 4/100
704/704 [==============================] - 1s 1ms/step - loss: 370964.5000 - val_loss: 228980.1094
Epoch 5/100
704/704 [==============================] - 1s 954us/step - loss: 270064.7188 - val_loss: 187627.6719
Epoch 6/100
704/704 [==============================] - 1s 996us/step - loss: 201201.0781 - val_loss: 156499.9219
Epoch 7/100
704/704 [==============================] - 1s 921us/step - loss: 161791.2812 - val_loss: 134593.2031
Epoch 8/100
704/704 [==============================] - 1s 909us/step - loss: 137109.0469 - val_loss: 121065.6328
Epoch 9/100
704/704 [==============================] - 1s 944us/step - loss: 123548.9688 - val_loss: 107083.4375
Epoch 10/100
704/704 [==============================] - 1s 924us/step - loss: 108412.5469 - val_loss: 88924.6484
Epoch 11/100
704/704 [==============================] - 1s 930us/step - loss: 97174.4453 - val_loss: 88146.6641
Epoch 12/100
704/704 [==============================] - 1s 991us/step - loss: 89870.0938 - val_loss: 76622.7500
Epoch 13/100
704/704 [==============================] - 1s 922us/step - loss: 76890.4453 - val_loss: 66036.2500
Epoch 14/100
704/704 [==============================] - 1s 900us/step - loss: 67690.3594 - val_loss: 61864.2188
Epoch 15/100
704/704 [==============================] - 1s 921us/step - loss: 64334.0625 - val_loss: 51967.2852
Epoch 16/100
704/704 [==============================] - 1s 917us/step - loss: 54646.0000 - val_loss: 44850.1172
Epoch 17/100
704/704 [==============================] - 1s 944us/step - loss: 52214.0938 - val_loss: 45643.4961
Epoch 18/100
704/704 [==============================] - 1s 918us/step - loss: 48644.3984 - val_loss: 38702.6172
Epoch 19/100
704/704 [==============================] - 1s 950us/step - loss: 45149.0078 - val_loss: 35255.2500
Epoch 20/100
704/704 [==============================] - 1s 916us/step - loss: 40864.4102 - val_loss: 40580.3438
Epoch 21/100
704/704 [==============================] - 1s 907us/step - loss: 37815.7891 - val_loss: 29783.3770
Epoch 22/100
704/704 [==============================] - 1s 919us/step - loss: 36096.1875 - val_loss: 29206.0488
Epoch 23/100
704/704 [==============================] - 1s 941us/step - loss: 36122.9648 - val_loss: 32235.5781
Epoch 24/100
704/704 [==============================] - 1s 954us/step - loss: 34018.6133 - val_loss: 23192.1895
Epoch 25/100
704/704 [==============================] - 1s 920us/step - loss: 32178.6309 - val_loss: 25032.0742
Epoch 26/100
704/704 [==============================] - 1s 914us/step - loss: 31690.5391 - val_loss: 20890.0039
Epoch 27/100
704/704 [==============================] - 1s 946us/step - loss: 30377.0918 - val_loss: 22395.0312
Epoch 28/100
704/704 [==============================] - 1s 914us/step - loss: 29048.0215 - val_loss: 19206.8398
Epoch 29/100
704/704 [==============================] - 1s 960us/step - loss: 30652.3613 - val_loss: 19920.5488
Epoch 30/100
704/704 [==============================] - 1s 934us/step - loss: 30598.5566 - val_loss: 18522.0742
Epoch 31/100
704/704 [==============================] - 1s 945us/step - loss: 30847.5938 - val_loss: 18823.6582
Epoch 32/100
704/704 [==============================] - 1s 954us/step - loss: 29810.5801 - val_loss: 25719.6309
Epoch 33/100
704/704 [==============================] - 1s 971us/step - loss: 30936.9941 - val_loss: 18998.4727
Epoch 34/100
704/704 [==============================] - 1s 975us/step - loss: 29148.0586 - val_loss: 25782.7363
Epoch 35/100
704/704 [==============================] - 1s 1ms/step - loss: 29016.1582 - val_loss: 19492.1445
Epoch 36/100
704/704 [==============================] - 1s 967us/step - loss: 28155.5840 - val_loss: 18333.2031
Epoch 37/100
704/704 [==============================] - 1s 923us/step - loss: 29917.1855 - val_loss: 18211.5723
Epoch 38/100
704/704 [==============================] - 1s 1ms/step - loss: 29799.9531 - val_loss: 18647.7129
Epoch 39/100
704/704 [==============================] - 1s 1ms/step - loss: 29617.7500 - val_loss: 18123.8242
Epoch 40/100
704/704 [==============================] - 1s 1ms/step - loss: 29498.4824 - val_loss: 21592.8262
Epoch 41/100
704/704 [==============================] - 1s 955us/step - loss: 27846.1328 - val_loss: 20101.0801
Epoch 42/100
704/704 [==============================] - 1s 925us/step - loss: 26755.8691 - val_loss: 20665.8027
Epoch 43/100
704/704 [==============================] - 1s 927us/step - loss: 29233.5684 - val_loss: 18402.5488
Epoch 44/100
704/704 [==============================] - 1s 989us/step - loss: 26747.4375 - val_loss: 24304.1934
Epoch 45/100
704/704 [==============================] - 1s 961us/step - loss: 29719.4570 - val_loss: 19946.5332
Epoch 46/100
704/704 [==============================] - 1s 942us/step - loss: 31544.8750 - val_loss: 18269.9570
Epoch 47/100
704/704 [==============================] - 1s 1ms/step - loss: 29931.5898 - val_loss: 18583.5527
Epoch 48/100
704/704 [==============================] - 1s 960us/step - loss: 27554.5547 - val_loss: 17885.3340
Epoch 49/100
704/704 [==============================] - 1s 1ms/step - loss: 27907.9043 - val_loss: 23136.6348
Epoch 50/100
704/704 [==============================] - 1s 1ms/step - loss: 27782.9668 - val_loss: 19790.5625
Epoch 51/100
704/704 [==============================] - 1s 998us/step - loss: 29348.0254 - val_loss: 17767.0527
Epoch 52/100
704/704 [==============================] - 1s 942us/step - loss: 28086.2324 - val_loss: 18786.7422
Epoch 53/100
704/704 [==============================] - 1s 976us/step - loss: 27253.8711 - val_loss: 18081.5742
Epoch 54/100
704/704 [==============================] - 1s 1ms/step - loss: 27139.8340 - val_loss: 18924.8984
Epoch 55/100
704/704 [==============================] - 1s 992us/step - loss: 28248.9453 - val_loss: 18837.6016
Epoch 56/100
704/704 [==============================] - 1s 994us/step - loss: 28383.3262 - val_loss: 18280.0996
Epoch 57/100
704/704 [==============================] - 1s 1ms/step - loss: 29237.2715 - val_loss: 18150.2402
Epoch 58/100
704/704 [==============================] - 1s 952us/step - loss: 27967.9355 - val_loss: 17753.2559
Epoch 59/100
704/704 [==============================] - 1s 1ms/step - loss: 29039.3223 - val_loss: 18085.8945
Epoch 60/100
704/704 [==============================] - 1s 1000us/step - loss: 27693.0938 - val_loss: 19619.4668
Epoch 61/100
704/704 [==============================] - 1s 1ms/step - loss: 28610.8477 - val_loss: 18276.2676
Epoch 62/100
704/704 [==============================] - 1s 939us/step - loss: 28180.4004 - val_loss: 18851.6719
Epoch 63/100
704/704 [==============================] - 1s 934us/step - loss: 27853.2188 - val_loss: 18724.0840
Epoch 64/100
704/704 [==============================] - 1s 916us/step - loss: 27905.6992 - val_loss: 18267.2090
Epoch 65/100
704/704 [==============================] - 1s 911us/step - loss: 29355.6270 - val_loss: 18444.0762
Epoch 66/100
704/704 [==============================] - 1s 929us/step - loss: 26469.2695 - val_loss: 20208.7129
Epoch 67/100
704/704 [==============================] - 1s 916us/step - loss: 28444.1680 - val_loss: 18490.1504
Epoch 68/100
704/704 [==============================] - 1s 994us/step - loss: 29570.3711 - val_loss: 18440.9023
Epoch 69/100
704/704 [==============================] - 1s 1ms/step - loss: 29045.4473 - val_loss: 17568.3164
Epoch 70/100
704/704 [==============================] - 1s 940us/step - loss: 27081.7988 - val_loss: 18136.9512
Epoch 71/100
704/704 [==============================] - 1s 943us/step - loss: 29446.9199 - val_loss: 18130.5059
Epoch 72/100
704/704 [==============================] - 1s 953us/step - loss: 28383.1504 - val_loss: 19205.0215
Epoch 73/100
704/704 [==============================] - 1s 992us/step - loss: 27926.0957 - val_loss: 19953.5215
Epoch 74/100
704/704 [==============================] - 1s 1ms/step - loss: 29600.6699 - val_loss: 18724.5469
Epoch 75/100
704/704 [==============================] - 1s 1ms/step - loss: 27701.9961 - val_loss: 17415.9043
Epoch 76/100
704/704 [==============================] - 1s 983us/step - loss: 26568.4668 - val_loss: 27208.8418
Epoch 77/100
704/704 [==============================] - 1s 988us/step - loss: 27869.2070 - val_loss: 25345.9219
Epoch 78/100
704/704 [==============================] - 1s 1ms/step - loss: 28270.1934 - val_loss: 22108.0469
Epoch 79/100
704/704 [==============================] - 1s 911us/step - loss: 28131.3164 - val_loss: 17481.0879
Epoch 80/100
704/704 [==============================] - 1s 993us/step - loss: 26645.5059 - val_loss: 18183.1152
Epoch 81/100
704/704 [==============================] - 1s 1ms/step - loss: 26932.0176 - val_loss: 17574.3477
Epoch 82/100
704/704 [==============================] - 1s 907us/step - loss: 26818.6328 - val_loss: 19009.5176
Epoch 83/100
704/704 [==============================] - 1s 909us/step - loss: 28662.8535 - val_loss: 17780.4707
Epoch 84/100
704/704 [==============================] - 1s 904us/step - loss: 26279.4102 - val_loss: 17990.9297
Epoch 85/100
704/704 [==============================] - 1s 939us/step - loss: 27189.9492 - val_loss: 17358.8184
Epoch 86/100
704/704 [==============================] - 1s 912us/step - loss: 27763.0762 - val_loss: 17730.5859
Epoch 87/100
704/704 [==============================] - 1s 932us/step - loss: 28034.2539 - val_loss: 17878.5215
Epoch 88/100
704/704 [==============================] - 1s 988us/step - loss: 27799.4199 - val_loss: 25897.2910
Epoch 89/100
704/704 [==============================] - 1s 966us/step - loss: 25930.4902 - val_loss: 21286.3438
Epoch 90/100
704/704 [==============================] - 1s 953us/step - loss: 27438.0156 - val_loss: 17690.0879
Epoch 91/100
704/704 [==============================] - 1s 1ms/step - loss: 29207.1719 - val_loss: 18108.9062
Epoch 92/100
704/704 [==============================] - 1s 1ms/step - loss: 26377.4043 - val_loss: 31612.2070
Epoch 93/100
704/704 [==============================] - 1s 1ms/step - loss: 27274.9473 - val_loss: 17859.0625
Epoch 94/100
704/704 [==============================] - 1s 975us/step - loss: 28577.6055 - val_loss: 17775.7207
Epoch 95/100
704/704 [==============================] - 1s 991us/step - loss: 25680.1016 - val_loss: 28983.9336
Epoch 96/100
704/704 [==============================] - 1s 913us/step - loss: 27420.3145 - val_loss: 17981.6855
Epoch 97/100
704/704 [==============================] - 1s 1ms/step - loss: 27030.9668 - val_loss: 26930.1387
Epoch 98/100
704/704 [==============================] - 1s 1ms/step - loss: 28824.2402 - val_loss: 18690.5586
Epoch 99/100
704/704 [==============================] - 1s 932us/step - loss: 26052.0664 - val_loss: 19363.3262
Epoch 100/100
704/704 [==============================] - 1s 945us/step - loss: 26007.7012 - val_loss: 18208.4805
In [8]:
# Evaluate mean-squared-error loss on the held-out test split.
test_loss = model.evaluate(X_test_scaled, y_test)
print("Test loss: {:.2f}".format(test_loss))
196/196 [==============================] - 0s 1ms/step - loss: 19077.4258
Test loss: 19077.43
In [9]:
# Predict the CO concentration for a single sensor reading.
# NOTE(review): these 13 values appear to match row 1 of the loaded data
# (see the DataFrame head above), so this is a sanity check on seen data
# rather than a truly new observation — confirm intent.
new_data = np.array([[865.08, 910.13, 81.53, 447.35, 44.22, 23.85, 30.36, 0.287120, 2.745789, 552.55, 188.27, 4231.60, 351.05]])

# Scale with the SAME StandardScaler fitted on the training split.
new_data_scaled = scaler.transform(new_data)

# model.predict returns a (1, 1) array; [0][0] extracts the scalar.
predicted_concentration = model.predict(new_data_scaled)

print("Predicted MQ136_CO concentration: {:.2f}".format(predicted_concentration[0][0]))
1/1 [==============================] - 0s 70ms/step
Predicted MQ136_CO concentration: 16189.71
In [10]:
import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow.keras import models, layers
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error, mean_absolute_error

# Load the sensor data and keep the same contiguous row slice used earlier.
N_ROWS = 31276  # named constant instead of an inline magic number
data = pd.read_csv('data4.csv')
df = data.iloc[:N_ROWS]

# Preprocess: drop the timestamp; predict MQ136_H2S from all other channels.
df = df.drop('timestamp', axis=1)
X = df.drop('MQ136_H2S', axis=1)
y = df['MQ136_H2S']

# 80/20 train/test split with a fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=20)

# Standardize using training-split statistics only (avoids test leakage).
scaler = StandardScaler().fit(X_train)
X_train_scaled = scaler.transform(X_train)
X_test_scaled = scaler.transform(X_test)

# Define the model: three 64-unit ReLU hidden layers, one linear output unit.
model = models.Sequential()
model.add(layers.Dense(64, activation='relu', input_shape=(X_train_scaled.shape[1],)))
model.add(layers.Dense(64, activation='relu'))
model.add(layers.Dense(64, activation='relu'))
model.add(layers.Dense(1, activation='linear'))

model.compile(loss='mean_squared_error', optimizer='adam', metrics=['mean_absolute_error', 'mean_squared_error'])

# Train the model (20% of the training split held out for validation).
history = model.fit(X_train_scaled, y_train, epochs=100, batch_size=10, validation_split=0.2)

# Evaluate the model. predict() returns an (n, 1) column vector — flatten it
# so both metric inputs are 1-D, and pass the ground truth FIRST per the
# sklearn signature mean_squared_error(y_true, y_pred). (MSE/MAE happen to be
# symmetric, so the printed numbers are unchanged, but the original argument
# order would silently break for asymmetric metrics such as MAPE or R^2.)
test_predictions = model.predict(X_test_scaled).flatten()
mse = mean_squared_error(y_test, test_predictions)
mae = mean_absolute_error(y_test, test_predictions)
rmse = np.sqrt(mse)
print("Mean Squared Error (MSE):", mse)
print("Mean Absolute Error (MAE):", mae)
print("Root Mean Squared Error (RMSE):", rmse)

# Baseline: ordinary least-squares linear regression on the same features.
model_multi = LinearRegression().fit(X_train_scaled, y_train)
linear_test_prediction = model_multi.predict(X_test_scaled)
mse_linear = mean_squared_error(y_test, linear_test_prediction)
mae_linear = mean_absolute_error(y_test, linear_test_prediction)
rmse_linear = np.sqrt(mse_linear)
print("\nLinear Regression:")
print("Mean Squared Error (MSE):", mse_linear)
print("Mean Absolute Error (MAE):", mae_linear)
print("Root Mean Squared Error (RMSE):", rmse_linear)
Epoch 1/100
2002/2002 [==============================] - 5s 1ms/step - loss: 2309.3342 - mean_absolute_error: 15.0744 - mean_squared_error: 2309.3342 - val_loss: 1391.8419 - val_mean_absolute_error: 13.2909 - val_mean_squared_error: 1391.8419
Epoch 2/100
2002/2002 [==============================] - 3s 1ms/step - loss: 354.3075 - mean_absolute_error: 6.6027 - mean_squared_error: 354.3075 - val_loss: 102.4357 - val_mean_absolute_error: 4.8388 - val_mean_squared_error: 102.4357
Epoch 3/100
2002/2002 [==============================] - 2s 1ms/step - loss: 269.3313 - mean_absolute_error: 6.2229 - mean_squared_error: 269.3313 - val_loss: 105.8843 - val_mean_absolute_error: 3.9827 - val_mean_squared_error: 105.8843
Epoch 4/100
2002/2002 [==============================] - 2s 1ms/step - loss: 215.1961 - mean_absolute_error: 5.4378 - mean_squared_error: 215.1961 - val_loss: 2451.0935 - val_mean_absolute_error: 16.7120 - val_mean_squared_error: 2451.0935
Epoch 5/100
2002/2002 [==============================] - 2s 1ms/step - loss: 356.7542 - mean_absolute_error: 5.9050 - mean_squared_error: 356.7542 - val_loss: 94.7419 - val_mean_absolute_error: 3.9929 - val_mean_squared_error: 94.7419
Epoch 6/100
2002/2002 [==============================] - 4s 2ms/step - loss: 175.7061 - mean_absolute_error: 5.2334 - mean_squared_error: 175.7061 - val_loss: 77.0499 - val_mean_absolute_error: 3.7137 - val_mean_squared_error: 77.0499
Epoch 7/100
2002/2002 [==============================] - 3s 1ms/step - loss: 335.8687 - mean_absolute_error: 5.6375 - mean_squared_error: 335.8687 - val_loss: 132.8246 - val_mean_absolute_error: 3.5224 - val_mean_squared_error: 132.8246
Epoch 8/100
2002/2002 [==============================] - 2s 1ms/step - loss: 133.0920 - mean_absolute_error: 4.5370 - mean_squared_error: 133.0920 - val_loss: 97.5563 - val_mean_absolute_error: 4.5985 - val_mean_squared_error: 97.5563
Epoch 9/100
2002/2002 [==============================] - 2s 1ms/step - loss: 199.9342 - mean_absolute_error: 5.4999 - mean_squared_error: 199.9342 - val_loss: 169.6759 - val_mean_absolute_error: 3.6462 - val_mean_squared_error: 169.6759
Epoch 10/100
2002/2002 [==============================] - 2s 1ms/step - loss: 196.2631 - mean_absolute_error: 5.0297 - mean_squared_error: 196.2631 - val_loss: 86.5141 - val_mean_absolute_error: 3.9232 - val_mean_squared_error: 86.5141
Epoch 11/100
2002/2002 [==============================] - 2s 1ms/step - loss: 187.3309 - mean_absolute_error: 4.8310 - mean_squared_error: 187.3309 - val_loss: 140.2790 - val_mean_absolute_error: 4.5712 - val_mean_squared_error: 140.2790
Epoch 12/100
2002/2002 [==============================] - 3s 1ms/step - loss: 220.0759 - mean_absolute_error: 5.1387 - mean_squared_error: 220.0759 - val_loss: 108.9341 - val_mean_absolute_error: 3.6845 - val_mean_squared_error: 108.9341
Epoch 13/100
2002/2002 [==============================] - 2s 1ms/step - loss: 195.7965 - mean_absolute_error: 4.7797 - mean_squared_error: 195.7965 - val_loss: 82.3557 - val_mean_absolute_error: 3.1972 - val_mean_squared_error: 82.3557
Epoch 14/100
2002/2002 [==============================] - 2s 1ms/step - loss: 204.3825 - mean_absolute_error: 5.1394 - mean_squared_error: 204.3825 - val_loss: 161.6669 - val_mean_absolute_error: 4.2125 - val_mean_squared_error: 161.6669
Epoch 15/100
2002/2002 [==============================] - 2s 1ms/step - loss: 210.4840 - mean_absolute_error: 4.1373 - mean_squared_error: 210.4840 - val_loss: 90.1952 - val_mean_absolute_error: 3.3777 - val_mean_squared_error: 90.1952
Epoch 16/100
2002/2002 [==============================] - 2s 1ms/step - loss: 149.8719 - mean_absolute_error: 4.2307 - mean_squared_error: 149.8719 - val_loss: 87.4851 - val_mean_absolute_error: 3.1581 - val_mean_squared_error: 87.4851
Epoch 17/100
2002/2002 [==============================] - 2s 1ms/step - loss: 212.1617 - mean_absolute_error: 4.7200 - mean_squared_error: 212.1617 - val_loss: 98.9299 - val_mean_absolute_error: 3.5960 - val_mean_squared_error: 98.9299
Epoch 18/100
2002/2002 [==============================] - 2s 1ms/step - loss: 149.1798 - mean_absolute_error: 4.5383 - mean_squared_error: 149.1798 - val_loss: 92.7567 - val_mean_absolute_error: 3.3590 - val_mean_squared_error: 92.7567
Epoch 19/100
2002/2002 [==============================] - 2s 1ms/step - loss: 162.4356 - mean_absolute_error: 4.4493 - mean_squared_error: 162.4356 - val_loss: 86.5279 - val_mean_absolute_error: 3.0929 - val_mean_squared_error: 86.5279
Epoch 20/100
2002/2002 [==============================] - 2s 1ms/step - loss: 188.0285 - mean_absolute_error: 4.8330 - mean_squared_error: 188.0285 - val_loss: 188.8574 - val_mean_absolute_error: 5.0675 - val_mean_squared_error: 188.8574
Epoch 21/100
2002/2002 [==============================] - 2s 1ms/step - loss: 198.4149 - mean_absolute_error: 4.6248 - mean_squared_error: 198.4149 - val_loss: 89.0413 - val_mean_absolute_error: 3.2837 - val_mean_squared_error: 89.0413
Epoch 22/100
2002/2002 [==============================] - 2s 1ms/step - loss: 225.7476 - mean_absolute_error: 4.5958 - mean_squared_error: 225.7476 - val_loss: 247.9156 - val_mean_absolute_error: 5.1777 - val_mean_squared_error: 247.9156
Epoch 23/100
2002/2002 [==============================] - 2s 1ms/step - loss: 159.9956 - mean_absolute_error: 4.6588 - mean_squared_error: 159.9956 - val_loss: 463.4867 - val_mean_absolute_error: 4.2825 - val_mean_squared_error: 463.4867
Epoch 24/100
2002/2002 [==============================] - 2s 1ms/step - loss: 228.1447 - mean_absolute_error: 4.6139 - mean_squared_error: 228.1447 - val_loss: 74.2598 - val_mean_absolute_error: 3.1006 - val_mean_squared_error: 74.2598
Epoch 25/100
2002/2002 [==============================] - 2s 1ms/step - loss: 156.2160 - mean_absolute_error: 4.2686 - mean_squared_error: 156.2160 - val_loss: 505.8314 - val_mean_absolute_error: 4.2153 - val_mean_squared_error: 505.8314
Epoch 26/100
2002/2002 [==============================] - 3s 1ms/step - loss: 188.1856 - mean_absolute_error: 4.2249 - mean_squared_error: 188.1856 - val_loss: 96.7141 - val_mean_absolute_error: 4.2092 - val_mean_squared_error: 96.7141
Epoch 27/100
2002/2002 [==============================] - 2s 1ms/step - loss: 156.5571 - mean_absolute_error: 4.2015 - mean_squared_error: 156.5571 - val_loss: 486.7256 - val_mean_absolute_error: 9.9037 - val_mean_squared_error: 486.7256
Epoch 28/100
2002/2002 [==============================] - 2s 1ms/step - loss: 174.2863 - mean_absolute_error: 4.5974 - mean_squared_error: 174.2863 - val_loss: 137.6788 - val_mean_absolute_error: 3.3530 - val_mean_squared_error: 137.6788
Epoch 29/100
2002/2002 [==============================] - 2s 1ms/step - loss: 187.0837 - mean_absolute_error: 4.4808 - mean_squared_error: 187.0837 - val_loss: 186.3968 - val_mean_absolute_error: 5.9708 - val_mean_squared_error: 186.3968
Epoch 30/100
2002/2002 [==============================] - 2s 1ms/step - loss: 190.2970 - mean_absolute_error: 4.6111 - mean_squared_error: 190.2970 - val_loss: 139.2308 - val_mean_absolute_error: 4.2062 - val_mean_squared_error: 139.2308
Epoch 31/100
2002/2002 [==============================] - 2s 1ms/step - loss: 165.1758 - mean_absolute_error: 4.2895 - mean_squared_error: 165.1758 - val_loss: 1255.0079 - val_mean_absolute_error: 11.8292 - val_mean_squared_error: 1255.0079
Epoch 32/100
2002/2002 [==============================] - 2s 1ms/step - loss: 190.8052 - mean_absolute_error: 4.2776 - mean_squared_error: 190.8052 - val_loss: 296.5206 - val_mean_absolute_error: 5.0266 - val_mean_squared_error: 296.5206
Epoch 33/100
2002/2002 [==============================] - 2s 1ms/step - loss: 136.5074 - mean_absolute_error: 4.2387 - mean_squared_error: 136.5074 - val_loss: 171.3309 - val_mean_absolute_error: 3.3493 - val_mean_squared_error: 171.3309
Epoch 34/100
2002/2002 [==============================] - 2s 1ms/step - loss: 149.0975 - mean_absolute_error: 4.4460 - mean_squared_error: 149.0975 - val_loss: 262.9336 - val_mean_absolute_error: 6.2375 - val_mean_squared_error: 262.9336
Epoch 35/100
2002/2002 [==============================] - 3s 1ms/step - loss: 124.5192 - mean_absolute_error: 3.9026 - mean_squared_error: 124.5192 - val_loss: 92.4869 - val_mean_absolute_error: 3.1673 - val_mean_squared_error: 92.4869
Epoch 36/100
2002/2002 [==============================] - 2s 1ms/step - loss: 127.8518 - mean_absolute_error: 4.2531 - mean_squared_error: 127.8518 - val_loss: 670.3236 - val_mean_absolute_error: 4.1141 - val_mean_squared_error: 670.3236
Epoch 37/100
2002/2002 [==============================] - 2s 1ms/step - loss: 232.7580 - mean_absolute_error: 4.9287 - mean_squared_error: 232.7580 - val_loss: 72.7001 - val_mean_absolute_error: 3.3568 - val_mean_squared_error: 72.7001
Epoch 38/100
2002/2002 [==============================] - 2s 1ms/step - loss: 143.8063 - mean_absolute_error: 4.0949 - mean_squared_error: 143.8063 - val_loss: 106.4853 - val_mean_absolute_error: 3.2706 - val_mean_squared_error: 106.4853
Epoch 39/100
2002/2002 [==============================] - 2s 1ms/step - loss: 135.9720 - mean_absolute_error: 4.0993 - mean_squared_error: 135.9720 - val_loss: 121.8406 - val_mean_absolute_error: 3.4061 - val_mean_squared_error: 121.8406
Epoch 40/100
2002/2002 [==============================] - 2s 1ms/step - loss: 153.3466 - mean_absolute_error: 4.0241 - mean_squared_error: 153.3466 - val_loss: 166.9334 - val_mean_absolute_error: 3.7598 - val_mean_squared_error: 166.9334
Epoch 41/100
2002/2002 [==============================] - 2s 1ms/step - loss: 164.4472 - mean_absolute_error: 4.3280 - mean_squared_error: 164.4472 - val_loss: 127.2534 - val_mean_absolute_error: 3.1819 - val_mean_squared_error: 127.2534
Epoch 42/100
2002/2002 [==============================] - 2s 1ms/step - loss: 175.4702 - mean_absolute_error: 4.3921 - mean_squared_error: 175.4702 - val_loss: 233.5849 - val_mean_absolute_error: 4.4739 - val_mean_squared_error: 233.5849
Epoch 43/100
2002/2002 [==============================] - 2s 1ms/step - loss: 155.9615 - mean_absolute_error: 3.9674 - mean_squared_error: 155.9615 - val_loss: 71.2719 - val_mean_absolute_error: 3.2623 - val_mean_squared_error: 71.2719
Epoch 44/100
2002/2002 [==============================] - 2s 1ms/step - loss: 162.9647 - mean_absolute_error: 4.2990 - mean_squared_error: 162.9647 - val_loss: 124.7344 - val_mean_absolute_error: 3.6746 - val_mean_squared_error: 124.7344
Epoch 45/100
2002/2002 [==============================] - 2s 1ms/step - loss: 160.4138 - mean_absolute_error: 4.2191 - mean_squared_error: 160.4138 - val_loss: 98.0822 - val_mean_absolute_error: 3.1975 - val_mean_squared_error: 98.0822
Epoch 46/100
2002/2002 [==============================] - 2s 1ms/step - loss: 118.4396 - mean_absolute_error: 4.0750 - mean_squared_error: 118.4396 - val_loss: 160.7507 - val_mean_absolute_error: 4.0760 - val_mean_squared_error: 160.7507
Epoch 47/100
2002/2002 [==============================] - 2s 1ms/step - loss: 153.0122 - mean_absolute_error: 4.2690 - mean_squared_error: 153.0122 - val_loss: 86.5215 - val_mean_absolute_error: 3.3123 - val_mean_squared_error: 86.5215
Epoch 48/100
2002/2002 [==============================] - 2s 1ms/step - loss: 142.7387 - mean_absolute_error: 4.0731 - mean_squared_error: 142.7387 - val_loss: 91.7699 - val_mean_absolute_error: 3.3882 - val_mean_squared_error: 91.7699
Epoch 49/100
2002/2002 [==============================] - 2s 1ms/step - loss: 138.2119 - mean_absolute_error: 3.9644 - mean_squared_error: 138.2119 - val_loss: 159.0529 - val_mean_absolute_error: 3.6686 - val_mean_squared_error: 159.0529
Epoch 50/100
2002/2002 [==============================] - 2s 1ms/step - loss: 139.6376 - mean_absolute_error: 4.0046 - mean_squared_error: 139.6376 - val_loss: 132.3272 - val_mean_absolute_error: 3.5094 - val_mean_squared_error: 132.3272
Epoch 51/100
2002/2002 [==============================] - 2s 1ms/step - loss: 145.3912 - mean_absolute_error: 4.1471 - mean_squared_error: 145.3912 - val_loss: 115.2908 - val_mean_absolute_error: 3.2098 - val_mean_squared_error: 115.2908
Epoch 52/100
2002/2002 [==============================] - 2s 1ms/step - loss: 153.8398 - mean_absolute_error: 4.1385 - mean_squared_error: 153.8398 - val_loss: 123.9368 - val_mean_absolute_error: 3.3894 - val_mean_squared_error: 123.9368
Epoch 53/100
2002/2002 [==============================] - 2s 1ms/step - loss: 149.3236 - mean_absolute_error: 4.1858 - mean_squared_error: 149.3236 - val_loss: 77.4969 - val_mean_absolute_error: 3.1657 - val_mean_squared_error: 77.4969
Epoch 54/100
2002/2002 [==============================] - 2s 1ms/step - loss: 165.3741 - mean_absolute_error: 4.1962 - mean_squared_error: 165.3741 - val_loss: 70.9730 - val_mean_absolute_error: 3.1290 - val_mean_squared_error: 70.9730
Epoch 55/100
2002/2002 [==============================] - 2s 1ms/step - loss: 106.5036 - mean_absolute_error: 3.7398 - mean_squared_error: 106.5036 - val_loss: 236.0446 - val_mean_absolute_error: 3.5709 - val_mean_squared_error: 236.0446
Epoch 56/100
2002/2002 [==============================] - 2s 1ms/step - loss: 161.9167 - mean_absolute_error: 4.1560 - mean_squared_error: 161.9167 - val_loss: 77.0665 - val_mean_absolute_error: 3.1133 - val_mean_squared_error: 77.0665
Epoch 57/100
2002/2002 [==============================] - 2s 1ms/step - loss: 155.7108 - mean_absolute_error: 4.0705 - mean_squared_error: 155.7108 - val_loss: 71.7396 - val_mean_absolute_error: 3.1235 - val_mean_squared_error: 71.7396
Epoch 58/100
2002/2002 [==============================] - 2s 1ms/step - loss: 170.9270 - mean_absolute_error: 3.9898 - mean_squared_error: 170.9270 - val_loss: 169.5397 - val_mean_absolute_error: 6.1083 - val_mean_squared_error: 169.5397
Epoch 59/100
2002/2002 [==============================] - 2s 1ms/step - loss: 115.1434 - mean_absolute_error: 3.7312 - mean_squared_error: 115.1434 - val_loss: 174.9449 - val_mean_absolute_error: 3.4000 - val_mean_squared_error: 174.9449
Epoch 60/100
2002/2002 [==============================] - 2s 1ms/step - loss: 129.2853 - mean_absolute_error: 4.0756 - mean_squared_error: 129.2853 - val_loss: 71.1510 - val_mean_absolute_error: 3.3235 - val_mean_squared_error: 71.1510
Epoch 61/100
2002/2002 [==============================] - 2s 1ms/step - loss: 109.2350 - mean_absolute_error: 3.7734 - mean_squared_error: 109.2350 - val_loss: 123.1669 - val_mean_absolute_error: 3.2137 - val_mean_squared_error: 123.1669
Epoch 62/100
2002/2002 [==============================] - 2s 1ms/step - loss: 168.9439 - mean_absolute_error: 4.0337 - mean_squared_error: 168.9439 - val_loss: 73.9542 - val_mean_absolute_error: 3.1256 - val_mean_squared_error: 73.9542
Epoch 63/100
2002/2002 [==============================] - 2s 1ms/step - loss: 151.9185 - mean_absolute_error: 4.0756 - mean_squared_error: 151.9185 - val_loss: 94.8863 - val_mean_absolute_error: 3.8755 - val_mean_squared_error: 94.8863
Epoch 64/100
2002/2002 [==============================] - 2s 1ms/step - loss: 128.9741 - mean_absolute_error: 3.9888 - mean_squared_error: 128.9741 - val_loss: 211.9258 - val_mean_absolute_error: 3.7072 - val_mean_squared_error: 211.9257
Epoch 65/100
2002/2002 [==============================] - 2s 1ms/step - loss: 178.1642 - mean_absolute_error: 4.3118 - mean_squared_error: 178.1642 - val_loss: 132.6861 - val_mean_absolute_error: 3.9532 - val_mean_squared_error: 132.6861
Epoch 66/100
2002/2002 [==============================] - 2s 1ms/step - loss: 128.4806 - mean_absolute_error: 3.8161 - mean_squared_error: 128.4806 - val_loss: 97.2333 - val_mean_absolute_error: 3.2590 - val_mean_squared_error: 97.2333
Epoch 67/100
2002/2002 [==============================] - 2s 1ms/step - loss: 136.0189 - mean_absolute_error: 4.0207 - mean_squared_error: 136.0189 - val_loss: 111.3006 - val_mean_absolute_error: 3.3953 - val_mean_squared_error: 111.3006
Epoch 68/100
2002/2002 [==============================] - 2s 1ms/step - loss: 222.7781 - mean_absolute_error: 4.1869 - mean_squared_error: 222.7781 - val_loss: 88.1832 - val_mean_absolute_error: 3.2465 - val_mean_squared_error: 88.1832
Epoch 69/100
2002/2002 [==============================] - 2s 1ms/step - loss: 139.0552 - mean_absolute_error: 3.9066 - mean_squared_error: 139.0552 - val_loss: 93.6267 - val_mean_absolute_error: 3.4113 - val_mean_squared_error: 93.6267
Epoch 70/100
2002/2002 [==============================] - 2s 1ms/step - loss: 159.2843 - mean_absolute_error: 3.9919 - mean_squared_error: 159.2843 - val_loss: 80.2807 - val_mean_absolute_error: 3.2949 - val_mean_squared_error: 80.2807
Epoch 71/100
2002/2002 [==============================] - 2s 1ms/step - loss: 149.7096 - mean_absolute_error: 3.8642 - mean_squared_error: 149.7096 - val_loss: 110.7206 - val_mean_absolute_error: 3.2313 - val_mean_squared_error: 110.7206
Epoch 72/100
2002/2002 [==============================] - 2s 1ms/step - loss: 119.1117 - mean_absolute_error: 3.6093 - mean_squared_error: 119.1117 - val_loss: 160.4910 - val_mean_absolute_error: 3.3841 - val_mean_squared_error: 160.4910
Epoch 73/100
2002/2002 [==============================] - 2s 1ms/step - loss: 150.4080 - mean_absolute_error: 4.2191 - mean_squared_error: 150.4080 - val_loss: 162.7778 - val_mean_absolute_error: 3.4348 - val_mean_squared_error: 162.7778
Epoch 74/100
2002/2002 [==============================] - 2s 1ms/step - loss: 142.5308 - mean_absolute_error: 3.6669 - mean_squared_error: 142.5308 - val_loss: 184.0316 - val_mean_absolute_error: 5.9707 - val_mean_squared_error: 184.0316
Epoch 75/100
2002/2002 [==============================] - 2s 1ms/step - loss: 142.5099 - mean_absolute_error: 3.9062 - mean_squared_error: 142.5099 - val_loss: 180.2545 - val_mean_absolute_error: 5.3745 - val_mean_squared_error: 180.2545
Epoch 76/100
2002/2002 [==============================] - 2s 1ms/step - loss: 135.1527 - mean_absolute_error: 3.9863 - mean_squared_error: 135.1527 - val_loss: 246.8391 - val_mean_absolute_error: 6.8365 - val_mean_squared_error: 246.8391
Epoch 77/100
2002/2002 [==============================] - 2s 1ms/step - loss: 179.5900 - mean_absolute_error: 3.7638 - mean_squared_error: 179.5900 - val_loss: 93.0637 - val_mean_absolute_error: 3.2526 - val_mean_squared_error: 93.0637
Epoch 78/100
2002/2002 [==============================] - 2s 1ms/step - loss: 137.8514 - mean_absolute_error: 3.8320 - mean_squared_error: 137.8514 - val_loss: 228.6207 - val_mean_absolute_error: 3.6516 - val_mean_squared_error: 228.6207
Epoch 79/100
2002/2002 [==============================] - 2s 1ms/step - loss: 177.8147 - mean_absolute_error: 4.0813 - mean_squared_error: 177.8147 - val_loss: 112.0948 - val_mean_absolute_error: 3.2341 - val_mean_squared_error: 112.0948
Epoch 80/100
2002/2002 [==============================] - 2s 1ms/step - loss: 137.7508 - mean_absolute_error: 3.7194 - mean_squared_error: 137.7508 - val_loss: 74.0114 - val_mean_absolute_error: 3.1654 - val_mean_squared_error: 74.0114
Epoch 81/100
2002/2002 [==============================] - 2s 1ms/step - loss: 129.1588 - mean_absolute_error: 4.0177 - mean_squared_error: 129.1588 - val_loss: 93.7022 - val_mean_absolute_error: 3.7899 - val_mean_squared_error: 93.7022
Epoch 82/100
2002/2002 [==============================] - 2s 1ms/step - loss: 137.3197 - mean_absolute_error: 4.0555 - mean_squared_error: 137.3197 - val_loss: 89.9699 - val_mean_absolute_error: 3.1262 - val_mean_squared_error: 89.9699
Epoch 83/100
2002/2002 [==============================] - 2s 1ms/step - loss: 136.9547 - mean_absolute_error: 3.8068 - mean_squared_error: 136.9547 - val_loss: 66.2563 - val_mean_absolute_error: 3.2754 - val_mean_squared_error: 66.2563
Epoch 84/100
2002/2002 [==============================] - 2s 1ms/step - loss: 132.3740 - mean_absolute_error: 3.8764 - mean_squared_error: 132.3740 - val_loss: 86.1564 - val_mean_absolute_error: 3.4720 - val_mean_squared_error: 86.1564
Epoch 85/100
2002/2002 [==============================] - 2s 1ms/step - loss: 134.7088 - mean_absolute_error: 3.8404 - mean_squared_error: 134.7088 - val_loss: 80.7178 - val_mean_absolute_error: 3.8065 - val_mean_squared_error: 80.7178
Epoch 86/100
2002/2002 [==============================] - 2s 1ms/step - loss: 160.0683 - mean_absolute_error: 3.9601 - mean_squared_error: 160.0683 - val_loss: 235.4740 - val_mean_absolute_error: 3.4929 - val_mean_squared_error: 235.4740
Epoch 87/100
2002/2002 [==============================] - 2s 1ms/step - loss: 135.8886 - mean_absolute_error: 3.8157 - mean_squared_error: 135.8886 - val_loss: 333.5317 - val_mean_absolute_error: 5.3818 - val_mean_squared_error: 333.5317
Epoch 88/100
2002/2002 [==============================] - 2s 1ms/step - loss: 132.5754 - mean_absolute_error: 3.8100 - mean_squared_error: 132.5754 - val_loss: 122.0154 - val_mean_absolute_error: 3.7463 - val_mean_squared_error: 122.0154
Epoch 89/100
2002/2002 [==============================] - 2s 1ms/step - loss: 122.1743 - mean_absolute_error: 3.8993 - mean_squared_error: 122.1743 - val_loss: 1611.3804 - val_mean_absolute_error: 8.5312 - val_mean_squared_error: 1611.3804
Epoch 90/100
2002/2002 [==============================] - 2s 1ms/step - loss: 144.6203 - mean_absolute_error: 4.0832 - mean_squared_error: 144.6203 - val_loss: 210.6133 - val_mean_absolute_error: 4.6440 - val_mean_squared_error: 210.6133
Epoch 91/100
2002/2002 [==============================] - 2s 1ms/step - loss: 126.7720 - mean_absolute_error: 3.8493 - mean_squared_error: 126.7720 - val_loss: 181.1415 - val_mean_absolute_error: 3.8885 - val_mean_squared_error: 181.1415
Epoch 92/100
2002/2002 [==============================] - 2s 1ms/step - loss: 128.8297 - mean_absolute_error: 3.9355 - mean_squared_error: 128.8297 - val_loss: 125.0691 - val_mean_absolute_error: 3.4337 - val_mean_squared_error: 125.0691
Epoch 93/100
2002/2002 [==============================] - 2s 1ms/step - loss: 151.2018 - mean_absolute_error: 3.6160 - mean_squared_error: 151.2018 - val_loss: 114.1864 - val_mean_absolute_error: 3.1259 - val_mean_squared_error: 114.1864
Epoch 94/100
2002/2002 [==============================] - 2s 1ms/step - loss: 137.0747 - mean_absolute_error: 3.7860 - mean_squared_error: 137.0747 - val_loss: 124.2531 - val_mean_absolute_error: 3.4605 - val_mean_squared_error: 124.2531
Epoch 95/100
2002/2002 [==============================] - 2s 1ms/step - loss: 138.8540 - mean_absolute_error: 3.8862 - mean_squared_error: 138.8540 - val_loss: 85.6273 - val_mean_absolute_error: 3.0965 - val_mean_squared_error: 85.6273
Epoch 96/100
2002/2002 [==============================] - 2s 1ms/step - loss: 141.9713 - mean_absolute_error: 3.7997 - mean_squared_error: 141.9713 - val_loss: 273.1074 - val_mean_absolute_error: 3.7547 - val_mean_squared_error: 273.1074
Epoch 97/100
2002/2002 [==============================] - 2s 1ms/step - loss: 125.9972 - mean_absolute_error: 3.7264 - mean_squared_error: 125.9972 - val_loss: 89.8735 - val_mean_absolute_error: 3.5573 - val_mean_squared_error: 89.8735
Epoch 98/100
2002/2002 [==============================] - 2s 1ms/step - loss: 135.7038 - mean_absolute_error: 3.9225 - mean_squared_error: 135.7038 - val_loss: 269.2707 - val_mean_absolute_error: 3.7771 - val_mean_squared_error: 269.2707
Epoch 99/100
2002/2002 [==============================] - 2s 1ms/step - loss: 121.0244 - mean_absolute_error: 3.7483 - mean_squared_error: 121.0244 - val_loss: 175.1150 - val_mean_absolute_error: 3.5508 - val_mean_squared_error: 175.1150
Epoch 100/100
2002/2002 [==============================] - 2s 1ms/step - loss: 116.5384 - mean_absolute_error: 3.7812 - mean_squared_error: 116.5384 - val_loss: 238.9182 - val_mean_absolute_error: 3.7651 - val_mean_squared_error: 238.9182
196/196 [==============================] - 0s 738us/step
Mean Squared Error (MSE): 99.18093514365631
Mean Absolute Error (MAE): 3.590208475156818
Root Mean Squared Error (RMSE): 9.958962553582392

Linear Regression:
Mean Squared Error (MSE): 76.50775082750076
Mean Absolute Error (MAE): 3.8777274622047972
Root Mean Squared Error (RMSE): 8.746870916362077
In [11]:
# Collect the evaluation metrics of both models into one dictionary,
# keyed by metric name, so it can be turned into a comparison table.
model_metrics = {
    'ANN': (mse, mae, rmse),
    'Linear Regression': (mse_linear, mae_linear, rmse_linear),
}
results = {
    'Model': list(model_metrics.keys()),
    'MSE': [m[0] for m in model_metrics.values()],
    'MAE': [m[1] for m in model_metrics.values()],
    'RMSE': [m[2] for m in model_metrics.values()],
}
In [12]:
# Convert the results dictionary to a DataFrame for tabular access.
results_df = pd.DataFrame(results)

# Plot one bar chart per metric comparing the two models.
# Use the explicit fig/ax interface instead of the implicit pyplot
# state machine, and close each figure after displaying it so that
# re-running this cell does not accumulate open figures in memory.
metrics = ['MSE', 'MAE', 'RMSE']
for metric in metrics:
    fig, ax = plt.subplots(figsize=(8, 5))
    ax.bar(results_df['Model'], results_df[metric])
    ax.set_xlabel('Model')
    ax.set_ylabel(metric)
    ax.set_title(f'{metric} Comparison: ANN vs. Linear Regression')
    plt.show()
    plt.close(fig)
In [ ]: