Curve Fitting with TensorFlow

This post uses TensorFlow to fit a curve in two ways, a single-value (per-sample) method and a vectorized (full-batch) method, and shows how a model trained with gradient descent can approximate a nonlinear function. The training target is the sine function, and the weights are adjusted to minimize a mean-squared-error loss.
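Both versions fit the same model: a linear combination of the odd powers x, x³, x⁵, x⁷, the same powers that appear in the Taylor series of the sine function:

$$
\sin(x) \;\approx\; w_1 x + w_2 x^3 + w_3 x^5 + w_4 x^7
$$

If training succeeds, the learned weights should end up near the values that minimize the mean squared error over the sampled interval [0, π]. The truncated Taylor coefficients (1, −1/6, 1/120, −1/5040) are a useful reference point, although the least-squares optimum on this interval is not exactly the same.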

Single-value method:

```python
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# Prepare training data: 100 samples of y = sin(x) on [0, pi]
train_X = np.linspace(0, np.pi, 100)
train_Y = np.sin(train_X)

# Define the model: y = w1*x + w2*x^3 + w3*x^5 + w4*x^7,
# with the powers computed inside the graph
X1 = tf.placeholder("float")
X2 = tf.placeholder("float")
X3 = tf.placeholder("float")
X4 = tf.placeholder("float")
Y = tf.placeholder("float")

w1 = tf.Variable(0.0, name="weight1")
w2 = tf.Variable(0.0, name="weight2")
w3 = tf.Variable(0.0, name="weight3")
w4 = tf.Variable(0.0, name="weight4")
b = tf.Variable(0.0, name="bias")  # defined but not used in the model

y1 = w1 * X1 + w2 * X2 ** 3 + w3 * X3 ** 5 + w4 * X4 ** 7
loss = tf.reduce_mean(tf.square(Y - y1))

# Per-epoch histories of the weights and the loss
c1, c2, c3, c4, loss1 = [], [], [], [], []

optimizer = tf.train.AdamOptimizer()
train_op = optimizer.minimize(loss)

# Create a session to run the training loop
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for epoch in range(1, 101):
        # Feed one (x, y) pair per optimization step
        for (x, y) in zip(train_X, train_Y):
            _, ww1, ww2, ww3, ww4, loss_ = sess.run(
                [train_op, w1, w2, w3, w4, loss],
                feed_dict={X1: x, X2: x, X3: x, X4: x, Y: y})
        print("Epoch: {}, w1: {}, w2: {}, w3: {}, w4: {}, loss: {}".format(
            epoch, ww1, ww2, ww3, ww4, loss_))
        c1.append(ww1)
        c2.append(ww2)
        c3.append(ww3)
        c4.append(ww4)
        loss1.append(loss_)

# Plot the training data and the fitted polynomial
plt.plot(train_X, train_Y, "+")
plt.plot(train_X, ww1 * train_X + ww2 * train_X ** 3 + ww3 * train_X ** 5 + ww4 * train_X ** 7)
plt.show()
```
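Note that tf.placeholder, tf.Session, and tf.train.AdamOptimizer belong to the TensorFlow 1.x API; under TensorFlow 2.x they are only available through tf.compat.v1. As a minimal sketch (not part of the original post; the loop structure and names such as predict are my own), the same per-sample fit could be written in TF 2.x style with tf.GradientTape:

```python
import tensorflow as tf
import numpy as np

train_X = np.linspace(0, np.pi, 100).astype(np.float32)
train_Y = np.sin(train_X)

# Four scalar weights, as in the original model
w = [tf.Variable(0.0) for _ in range(4)]
optimizer = tf.keras.optimizers.Adam()

def predict(x):
    # y = w1*x + w2*x^3 + w3*x^5 + w4*x^7
    return w[0] * x + w[1] * x ** 3 + w[2] * x ** 5 + w[3] * x ** 7

for epoch in range(100):
    for x, y in zip(train_X, train_Y):
        with tf.GradientTape() as tape:
            loss = tf.reduce_mean(tf.square(y - predict(x)))
        grads = tape.gradient(loss, w)
        optimizer.apply_gradients(zip(grads, w))
    print("Epoch {}: loss {:.6f}".format(epoch + 1, float(loss)))
```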

Vectorized method:

```python
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

# Prepare training data: 100 samples of y = sin(x) on [0, pi]
train_X = np.linspace(0, np.pi, 100)
train_Y = np.sin(train_X)

# Define the model; the odd powers of x are precomputed and fed in as vectors
X1 = tf.placeholder(tf.float32, shape=(100,))
X2 = tf.placeholder(tf.float32, shape=(100,))
X3 = tf.placeholder(tf.float32, shape=(100,))
X4 = tf.placeholder(tf.float32, shape=(100,))
Y = tf.placeholder(tf.float32, shape=(100,))

w1 = tf.Variable(0.0, name="weight1")
w2 = tf.Variable(0.0, name="weight2")
w3 = tf.Variable(0.0, name="weight3")
w4 = tf.Variable(0.0, name="weight4")

y1 = w1 * X1 + w2 * X2 + w3 * X3 + w4 * X4
loss = tf.reduce_mean(tf.square(Y - y1))

optimizer = tf.train.AdamOptimizer()
train_op = optimizer.minimize(loss)

# Create a session to run the training loop
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for epoch in range(1, 10001):
        # Each step uses the full batch of 100 points
        _, ww1, ww2, ww3, ww4, loss_ = sess.run(
            [train_op, w1, w2, w3, w4, loss],
            feed_dict={X1: train_X, X2: train_X ** 3, X3: train_X ** 5,
                       X4: train_X ** 7, Y: train_Y})
        print("Epoch: {}, w1: {}, w2: {}, w3: {}, w4: {}, loss: {}".format(
            epoch, ww1, ww2, ww3, ww4, loss_))

# Plot the data and the fitted curve, then save the figure
plt.plot(train_X, train_Y, "+", label="data")
plt.plot(train_X,
         ww1 * train_X + ww2 * train_X ** 3 + ww3 * train_X ** 5 + ww4 * train_X ** 7,
         label="curve")
plt.axis([0, np.pi, -2, 2])
plt.legend(loc=1)
plt.savefig("1.png", dpi=200)  # save after the axis limits and legend are set
plt.show()
```
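Because the model is linear in its weights, this fit also has a closed-form least-squares solution. The short NumPy check below (not part of the original code, purely illustrative) shows the values the trained weights should converge toward:

```python
import numpy as np

train_X = np.linspace(0, np.pi, 100)
train_Y = np.sin(train_X)

# Design matrix of the odd powers used by the model
A = np.stack([train_X, train_X ** 3, train_X ** 5, train_X ** 7], axis=1)

# Closed-form least-squares solution for [w1, w2, w3, w4]
coeffs, *_ = np.linalg.lstsq(A, train_Y, rcond=None)
print(coeffs)  # compare with the weights learned by gradient descent
```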

Result plots (the generated figures are not reproduced here).

### Overfitting curves: definition and appearance

An overfitting curve describes how a model's performance changes with the number of training epochs. Early in training, the training loss falls quickly and the training accuracy rises quickly. Once the model begins to overfit, the training loss keeps falling and the training accuracy keeps rising, but the validation loss stops falling and starts to rise, and the validation accuracy stops rising or even drops. The training and validation curves separate, and that separation is the typical signature of overfitting.

### Example: plotting an overfitting curve with Keras and Matplotlib

```python
import tensorflow as tf
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
import matplotlib.pyplot as plt

# Load the MNIST dataset
(x_train, y_train), (x_test, y_test) = mnist.load_data()

# Preprocess: flatten the images, scale to [0, 1], one-hot encode the labels
x_train = x_train.reshape(-1, 28 * 28) / 255.0
x_test = x_test.reshape(-1, 28 * 28) / 255.0
y_train = tf.keras.utils.to_categorical(y_train, 10)
y_test = tf.keras.utils.to_categorical(y_test, 10)

# Build a simple fully connected network
model = Sequential([
    Dense(512, activation='relu', input_shape=(28 * 28,)),
    Dense(256, activation='relu'),
    Dense(10, activation='softmax')
])

# Compile the model
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])

# Train the model
history = model.fit(x_train, y_train,
                    epochs=20,
                    batch_size=128,
                    validation_data=(x_test, y_test))

# Plot training and validation loss
plt.figure(figsize=(12, 4))
plt.subplot(1, 2, 1)
plt.plot(history.history['loss'], label='Training Loss')
plt.plot(history.history['val_loss'], label='Validation Loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()

# Plot training and validation accuracy
plt.subplot(1, 2, 2)
plt.plot(history.history['accuracy'], label='Training Accuracy')
plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()

plt.show()
```

### Reading and using the overfitting curve

The overfitting curve shows whether a model is overfitting and how severe it is. If the validation loss starts rising early in training, the overfitting is probably severe; if it only rises late in training, it is relatively mild. The shape of the curve also suggests how to adjust the training strategy, for example early stopping: stop training when the validation loss begins to rise, so the model does not overfit any further.
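Keras provides early stopping as the EarlyStopping callback. A minimal sketch that reuses the model and data from the snippet above; the monitor and patience values here are illustrative assumptions, not values from the original text:

```python
from tensorflow.keras.callbacks import EarlyStopping

# Stop when val_loss has not improved for 3 consecutive epochs,
# and roll back to the weights from the best epoch
early_stop = EarlyStopping(monitor='val_loss', patience=3, restore_best_weights=True)

history = model.fit(x_train, y_train,
                    epochs=20,
                    batch_size=128,
                    validation_data=(x_test, y_test),
                    callbacks=[early_stop])
```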