This is not a real recommendation model; it is just an attempt to assemble the wide & deep model structure.
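Briefly: a wide & deep model combines a "wide" (linear) path, good at memorizing raw feature interactions, with a "deep" (multi-layer) path, good at generalizing. In the simplified version built below, both paths read the same eight input features, and the wide path is just the raw input concatenated onto the output of the deep stack.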
import matplotlib as mpl
import matplotlib.pyplot as plt
%matplotlib inline
import numpy as np
import sklearn
import pandas as pd
import os
import sys
import time
import tensorflow as tf
from tensorflow import keras
print(tf.__version__)
print(sys.version_info)
for module in mpl, np, pd, sklearn, tf, keras:
    print(module.__name__, module.__version__)
2.19.0
sys.version_info(major=3, minor=11, micro=2, releaselevel='final', serial=0)
matplotlib 3.10.3
numpy 2.1.3
pandas 2.2.3
sklearn 1.6.1
tensorflow 2.19.0
keras._tf_keras.keras 3.9.2
from sklearn.datasets import fetch_california_housing
housing = fetch_california_housing()
print(housing.DESCR)
print(housing.data.shape)
print(housing.target.shape)
.. _california_housing_dataset:

California Housing dataset
--------------------------

**Data Set Characteristics:**

:Number of Instances: 20640

:Number of Attributes: 8 numeric, predictive attributes and the target

:Attribute Information:
    - MedInc        median income in block group
    - HouseAge      median house age in block group
    - AveRooms      average number of rooms per household
    - AveBedrms     average number of bedrooms per household
    - Population    block group population
    - AveOccup      average number of household members
    - Latitude      block group latitude
    - Longitude     block group longitude

:Missing Attribute Values: None

This dataset was obtained from the StatLib repository.
https://www.dcc.fc.up.pt/~ltorgo/Regression/cal_housing.html

The target variable is the median house value for California districts,
expressed in hundreds of thousands of dollars ($100,000).

This dataset was derived from the 1990 U.S. census, using one row per census
block group. A block group is the smallest geographical unit for which the
U.S. Census Bureau publishes sample data (a block group typically has a
population of 600 to 3,000 people).

A household is a group of people residing within a home. Since the average
number of rooms and bedrooms in this dataset are provided per household,
these columns may take surprisingly large values for block groups with few
households and many empty houses, such as vacation resorts.

It can be downloaded/loaded using the
:func:`sklearn.datasets.fetch_california_housing` function.

.. rubric:: References

- Pace, R. Kelley and Ronald Barry, Sparse Spatial Autoregressions,
  Statistics and Probability Letters, 33 (1997) 291-297

(20640, 8)
(20640,)
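Before splitting, it can help to eyeball the raw features. A minimal sketch (not part of the original notebook), using the already-imported pandas and the feature names shipped in the Bunch object returned by fetch_california_housing:

# Peek at the raw features as a DataFrame (column names come from the Bunch)
df = pd.DataFrame(housing.data, columns=housing.feature_names)
print(df.head())
print(df.describe().T)  # per-feature count/mean/std/min/max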
from sklearn.model_selection import train_test_split
x_train_all, x_test, y_train_all, y_test = train_test_split(
    housing.data, housing.target, random_state=7)
x_train, x_valid, y_train, y_valid = train_test_split(
    x_train_all, y_train_all, random_state=11)
print(x_train.shape, y_train.shape)
print(x_valid.shape, y_valid.shape)
print(x_test.shape, y_test.shape)
(11610, 8) (11610,)
(3870, 8) (3870,)
(5160, 8) (5160,)
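These shapes follow from train_test_split's default test_size=0.25: the first split holds out 20640 × 0.25 = 5160 test rows, leaving 15480; the second split then holds out 15480 × 0.25 = 3870 validation rows, leaving 11610 for training.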
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
x_train_scaled = scaler.fit_transform(x_train)
x_valid_scaled = scaler.transform(x_valid)
x_test_scaled = scaler.transform(x_test)
print(x_train.shape[1:])
(8,)
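As a quick sanity check (a sketch, not in the original notebook): the scaled training features should have roughly zero mean and unit variance by construction, while the validation and test sets will only be approximately standardized, since they were transformed with the training set's statistics:

# Training set: mean ~ 0, std ~ 1 per feature
print(x_train_scaled.mean(axis=0).round(2))
print(x_train_scaled.std(axis=0).round(2))
# Validation set: close to, but not exactly, standardized
print(x_valid_scaled.mean(axis=0).round(2))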
A simple model sketch:
# This part builds the deep model.
# Functional API (functional-style API); unlike the Sequential approach
# used before, each layer is called like a function on the previous tensor.
input = keras.layers.Input(shape=x_train.shape[1:])
print(input)
# input serves as the input to the first hidden layer
hidden1 = keras.layers.Dense(30, activation='relu')(input)
# hidden1 serves as the input to the second hidden layer
hidden2 = keras.layers.Dense(30, activation='relu')(hidden1)
# function composition: f(x) = h(g(x))
concat = keras.layers.concatenate([input, hidden2])
output = keras.layers.Dense(1)(concat)
# Then define the model by passing in the inputs and outputs
model = keras.models.Model(inputs=[input],
                           outputs=[output])
model.summary()
model.compile(loss="mean_squared_error",
              optimizer=keras.optimizers.SGD(0.001))
callbacks = [keras.callbacks.EarlyStopping(
    patience=5, min_delta=1e-3)]
<KerasTensor shape=(None, 8), dtype=float32, sparse=False, ragged=False, name=keras_tensor>
Model: "functional"
โโโโโโโโโโโโโโโโโโโโโโโณโโโโโโโโโโโโโโโโโโโโณโโโโโโโโโโโโโณโโโโโโโโโโโโโโโโโโโโ
โ Layer (type)        โ Output Shape      โ    Param # โ Connected to      โ
โกโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโฉ
โ input_layer         โ (None, 8)         โ          0 โ -                 โ
โ (InputLayer)        โ                   โ            โ                   โ
โโโโโโโโโโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโผโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโค
โ dense (Dense)       โ (None, 30)        โ        270 โ input_layer[0][0] โ
โโโโโโโโโโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโผโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโค
โ dense_1 (Dense)     โ (None, 30)        โ        930 โ dense[0][0]       โ
โโโโโโโโโโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโผโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโค
โ concatenate         โ (None, 38)        โ          0 โ input_layer[0][0โฆ โ
โ (Concatenate)       โ                   โ            โ dense_1[0][0]     โ
โโโโโโโโโโโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโผโโโโโโโโโโโโโผโโโโโโโโโโโโโโโโโโโโค
โ dense_2 (Dense)     โ (None, 1)         โ         39 โ concatenate[0][0] โ
โโโโโโโโโโโโโโโโโโโโโโโดโโโโโโโโโโโโโโโโโโโโดโโโโโโโโโโโโโดโโโโโโโโโโโโโโโโโโโโ
Total params: 1,239 (4.84 KB)
Trainable params: 1,239 (4.84 KB)
Non-trainable params: 0 (0.00 B)
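The parameter counts check out: dense has 8 × 30 weights + 30 biases = 270, dense_1 has 30 × 30 + 30 = 930, and dense_2 takes the 38-dimensional concatenation (8 raw features + 30 hidden units) down to one output, 38 × 1 + 1 = 39, for 1,239 parameters in total.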
model.layers
[<InputLayer name=input_layer, built=True>, <Dense name=dense, built=True>, <Dense name=dense_1, built=True>, <Concatenate name=concatenate, built=True>, <Dense name=dense_2, built=True>]
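A note on the callback before training: EarlyStopping monitors val_loss by default, and with patience=5 and min_delta=1e-3 it halts training only after val_loss has failed to improve by at least 0.001 for 5 consecutive epochs. In the run below val_loss kept improving often enough that early stopping never triggered, so all 100 epochs executed.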
history = model.fit(x_train_scaled, y_train,
                    validation_data=(x_valid_scaled, y_valid),
                    epochs=100,
                    callbacks=callbacks)
Epoch 1/100
/home/zhiyue/Documents/myvenv/tf/lib/python3.11/site-packages/keras/src/models/functional.py:238: UserWarning: The structure of `inputs` doesn't match the expected structure. Expected: ['keras_tensor'] Received: inputs=Tensor(shape=(None, 8))
  warnings.warn(msg)
363/363 โโโโโโโโโโโโโโโโโโโโ 1s 2ms/step - loss: 2.9595 - val_loss: 0.8802
Epoch 2/100
363/363 โโโโโโโโโโโโโโโโโโโโ 1s 2ms/step - loss: 0.7197 - val_loss: 0.6953
Epoch 3/100
363/363 โโโโโโโโโโโโโโโโโโโโ 1s 2ms/step - loss: 0.6419 - val_loss: 0.6567
...
Epoch 99/100
363/363 โโโโโโโโโโโโโโโโโโโโ 1s 2ms/step - loss: 0.3415 - val_loss: 0.3685
Epoch 100/100
363/363 โโโโโโโโโโโโโโโโโโโโ 1s 2ms/step - loss: 0.3428 - val_loss: 0.3675
print(history.history)
{'loss': [1.9442572593688965, 0.6913352608680725, 0.6317581534385681, ..., 0.3501761853694916, 0.34951359033584595, 0.3489224910736084],
 'val_loss': [0.8802490234375, 0.6953044533729553, 0.656663715839386, ..., 0.367801696062088, 0.3685286045074463, 0.3674905300140381]}
def plot_learning_curves(history):
    pd.DataFrame(history.history).plot(figsize=(8, 5))
    plt.grid(True)
    plt.gca().set_ylim(0, 2.5)
    plt.show()

plot_learning_curves(history)
# It's not that the model itself is bad; rather, the version built here is fairly simple.
model.evaluate(x_test_scaled, y_test, verbose=0)
0.37196114659309387
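As the comment above notes, this single-input version is deliberately simple: the wide and deep paths share all eight features. A more faithful wide & deep variant feeds different feature subsets to each path through two Input layers. The sketch below is illustrative only (not from the original notebook), and the particular split (first five columns wide, last six deep, with overlap) is an arbitrary assumption:

# Hypothetical two-input wide & deep: the wide path sees features 0-4,
# the deep path sees features 2-7 (the overlap is an arbitrary choice).
input_wide = keras.layers.Input(shape=[5])
input_deep = keras.layers.Input(shape=[6])
hidden1 = keras.layers.Dense(30, activation='relu')(input_deep)
hidden2 = keras.layers.Dense(30, activation='relu')(hidden1)
concat = keras.layers.concatenate([input_wide, hidden2])
output = keras.layers.Dense(1)(concat)
model2 = keras.models.Model(inputs=[input_wide, input_deep],
                            outputs=[output])
model2.compile(loss="mean_squared_error",
               optimizer=keras.optimizers.SGD(0.001))

# fit/evaluate now take one array per Input, sliced from the scaled data
x_train_wide, x_train_deep = x_train_scaled[:, :5], x_train_scaled[:, 2:]
x_valid_wide, x_valid_deep = x_valid_scaled[:, :5], x_valid_scaled[:, 2:]
history2 = model2.fit([x_train_wide, x_train_deep], y_train,
                      validation_data=([x_valid_wide, x_valid_deep], y_valid),
                      epochs=10)

Because the two paths now receive different inputs, fit and evaluate must be given a list of arrays, one per Input layer, in the same order as the inputs argument of the Model.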