[PaddleFluid Warm-Up] Exercise 2 · Fitting a Sine Function with a DNN

Artificial Intelligence, Python


Building on the previous blog post, with a few improvements, this exercise fits a sine curve with a DNN.

  • Data generation

code

from paddle import fluid as fl
import numpy as np
import matplotlib.pyplot as plt

def get_data(x):
    # noisy target: sin(3.14*x) + 1 plus uniform noise in [-0.02, 0.02]
    c, r = x.shape
    y = np.sin(x * 3.14) + 1 + 0.02 * (2 * np.random.rand(c, r) - 1)
    return y

xs = np.arange(0, 3, 0.01).reshape(-1, 1)  # 300 samples on [0, 3)
ys = get_data(xs)
xs = xs.astype('float32')  # Fluid feeds expect float32
ys = ys.astype('float32')

# Uncomment to preview the generated curve:
# plt.title("curve")
# plt.plot(xs, ys)
# plt.show()

out
[Figure: plot of the generated noisy sine data]
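
The generated data are samples of sin(3.14·x) + 1 on [0, 3) with a small amount of uniform noise. As a quick sanity check of the noise model, here is a minimal sketch, assuming get_data, xs, and ys from the snippet above are still in scope:

code

# residual between the noisy samples and the noise-free target sin(3.14*x) + 1
clean = np.sin(xs * 3.14) + 1
noise = ys - clean
print(noise.min(), noise.max())  # both values should lie within [-0.02, 0.02]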

  • Full code

code

from paddle import fluid as fl
import numpy as np
import matplotlib.pyplot as plt

def get_data(x):
    # noisy target: sin(3.14*x) + 1 plus uniform noise in [-0.02, 0.02]
    c, r = x.shape
    y = np.sin(x * 3.14) + 1 + 0.02 * (2 * np.random.rand(c, r) - 1)
    return y

xs = np.arange(0, 3, 0.01).reshape(-1, 1)
ys = get_data(xs)
xs = xs.astype('float32')
ys = ys.astype('float32')

# Uncomment to preview the generated curve:
# plt.title("curve")
# plt.plot(xs, ys)
# plt.show()

# Placeholders for the input x and the label y
x = fl.layers.data(name="x", shape=[1], dtype="float32")
y = fl.layers.data(name="y", shape=[1], dtype="float32")

# A small DNN: one hidden fully connected layer with ReLU, then a linear output
l1 = fl.layers.fc(input=x, size=64, act="relu")
# l1 = fl.layers.fc(input=l1, size=16, act="relu")  # optional second hidden layer
pre = fl.layers.fc(input=l1, size=1)

# Mean squared error between prediction and label
loss = fl.layers.mean(
    fl.layers.square_error_cost(input=pre, label=y))

opt = fl.optimizer.Adam(0.1)
opt.minimize(loss)

# Run on CPU and initialize the parameters
exe = fl.Executor(fl.core.CPUPlace())
exe.run(fl.default_startup_program())

# Full-batch training: feed all samples each step, fetch prediction and loss
for i in range(1, 4001):
    outs = exe.run(
        feed={x.name: xs, y.name: ys},
        fetch_list=[pre.name, loss.name])
    if i % 500 == 0:
        print(i, " steps,loss is", outs[1])

# Plot the ground-truth data against the fitted curve
plt.title("sin")
plt.plot(xs, ys)
plt.plot(xs, outs[0])
plt.show()

out
[Figure: ground-truth data and the DNN's fitted curve]

(paddle) C:\Files\DATAs\prjs\python\paddle\demo>C:/Files/APPs/RuanJian/Miniconda3/envs/paddle/python.exe c:/Files/DATAs/prjs/python/paddle/demo/sin.py
500  steps,loss is [0.09414934]
1000  steps,loss is [0.03732136]
1500  steps,loss is [0.01576269]
2000  steps,loss is [0.00068113]
2500  steps,loss is [0.01121321]
3000  steps,loss is [0.00206144]
3500  steps,loss is [0.00284895]
4000  steps,loss is [0.00058186]
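
As a quick follow-up, a minimal sketch (assuming xs and outs from the last training step are still in scope) that measures how close the fit is to the noise-free target rather than to the noisy samples:

code

# RMSE between the network's final prediction and the noise-free target sin(3.14*x) + 1
pred = outs[0]
target = np.sin(xs * 3.14) + 1
rmse = np.sqrt(np.mean((pred - target) ** 2))
print("RMSE vs. noise-free target:", rmse)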