DNN sine function fitting

Building on the previous blog post, this one makes a small improvement: fitting a sine curve with a DNN.

  • Generate the data

code

from paddle import fluid as fl
import numpy as np
import matplotlib.pyplot as plt

def get_data(x):
    # sine curve with a +1 offset plus uniform noise in [-0.02, 0.02]
    c, r = x.shape
    y = np.sin(x * 3.14) + 1 + 0.02 * (2 * np.random.rand(c, r) - 1)
    return y

xs = np.arange(0, 3, 0.01).reshape(-1, 1)
ys = get_data(xs)
xs = xs.astype('float32')
ys = ys.astype('float32')

# optional: preview the generated curve
# plt.title("curve")
# plt.plot(xs, ys)
# plt.show()

out

(plot of the generated noisy sine curve)
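Not part of the original post, but as a quick sanity check (assuming the xs and ys arrays built above), the shape and range of the generated data can be inspected before any network is defined:

# Quick look at the generated data (sketch, not in the original post).
# y = sin(3.14 * x) + 1 plus uniform noise in [-0.02, 0.02], so ys stays roughly in [0, 2].
print(xs.shape, ys.shape)   # (300, 1) (300, 1)
print(ys.min(), ys.max())   # roughly 0 and 2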

  • The complete code

code

from paddle import fluid as fl
import numpy as np
import matplotlib.pyplot as plt

def get_data(x):
    # sine curve with a +1 offset plus uniform noise in [-0.02, 0.02]
    c, r = x.shape
    y = np.sin(x * 3.14) + 1 + 0.02 * (2 * np.random.rand(c, r) - 1)
    return y

xs = np.arange(0, 3, 0.01).reshape(-1, 1)
ys = get_data(xs)
xs = xs.astype('float32')
ys = ys.astype('float32')

# optional: preview the generated curve
# plt.title("curve")
# plt.plot(xs, ys)
# plt.show()

# placeholders for the input x and the label y
x = fl.layers.data(name="x", shape=[1], dtype="float32")
y = fl.layers.data(name="y", shape=[1], dtype="float32")

# one hidden layer with 64 ReLU units, then a linear output layer
l1 = fl.layers.fc(input=x, size=64, act="relu")
# l1 = fl.layers.fc(input=l1, size=16, act="relu")
pre = fl.layers.fc(input=l1, size=1)

# mean squared error between the prediction and the label
loss = fl.layers.mean(
    fl.layers.square_error_cost(input=pre, label=y))

opt = fl.optimizer.Adam(0.1)
opt.minimize(loss)

# run the startup program once to initialize the parameters
exe = fl.Executor(fl.core.CPUPlace())
exe.run(fl.default_startup_program())

for i in range(1, 4001):
    outs = exe.run(
        feed={x.name: xs, y.name: ys},
        fetch_list=[pre.name, loss.name])
    if i % 500 == 0:
        print(i, "steps, loss is", outs[1])


plt.title("sin")
plt.plot(xs,ys)
plt.plot(xs,outs[0])
plt.show()

out

(paddle) C:\Files\DATAs\prjs\python\paddle\demo>C:/Files/APPs/RuanJian/Miniconda3/envs/paddle/python.exe C:/Files/DATAs/prjs/python/paddle/demo/sin.py
500 steps, loss is [0.09414934]
1000 steps, loss is [0.03732136]
1500 steps, loss is [0.01576269]
2000 steps, loss is [0.00068113]
2500 steps, loss is [0.01121321]
3000 steps, loss is [0.00206144]
3500 steps, loss is [0.00284895]
4000 steps, loss is [0.00058186]
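The fluid interface used above is the older static-graph API and has since been superseded by the PaddlePaddle 2.x imperative (dygraph) API. Purely as a sketch, and assuming the same xs and ys arrays built earlier, the same 1-64-1 network and training loop might look like this in the newer API:

import paddle
import paddle.nn as nn
import paddle.nn.functional as F

# Sketch only: Paddle 2.x equivalent of the fluid network above, not part of the original post.
class SinNet(nn.Layer):
    def __init__(self):
        super().__init__()
        self.fc1 = nn.Linear(1, 64)   # hidden layer, 64 units
        self.fc2 = nn.Linear(64, 1)   # linear output layer

    def forward(self, x):
        return self.fc2(F.relu(self.fc1(x)))

net = SinNet()
opt = paddle.optimizer.Adam(learning_rate=0.1, parameters=net.parameters())
loss_fn = nn.MSELoss()

x_t = paddle.to_tensor(xs)   # xs, ys: the float32 arrays built above
y_t = paddle.to_tensor(ys)

for i in range(1, 4001):
    pre = net(x_t)
    loss = loss_fn(pre, y_t)
    loss.backward()
    opt.step()
    opt.clear_grad()
    if i % 500 == 0:
        print(i, "steps, loss is", float(loss))

The learning rate, layer sizes, and number of steps mirror the fluid version; only the API surface differs.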