PRP 共轭梯度法
PRP共軛梯度法對於正定二次函數的效果與FR共軛梯度法近似;而對於一般函數,PRP算法通常優於FR算法。
?
# PRP conjugate gradient demo (reconstructed from a whitespace-mangled blog paste).
from 實用優化算法.helper import *  # project helper (gradient utilities, sympy/numpy re-exports); covered in a separate post
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D

# Symbolic variables of the objective; `a` is the step-length symbol used by the line searches.
x1, x2, a = symbols('x1,x2,a')
k = 0  # NOTE(review): shadowed by the local `k` inside do_work(); kept for fidelity


def get_f():
    """Return the symbolic objective function in x1, x2."""
    # Earlier objectives tried by the author, kept for reference:
    # f = pow(1 - x1,2) + 2*pow(x2 - pow(x1,2),2)
    # f = (3/2) * pow(x1,2) + pow(x2,2)/2 - x1*x2 - 2*x1
    f = 100 * (x2 - x1 ** 2) ** 2 + (1 - x1) ** 2  # Rosenbrock function
    return f


def do_work(x):
    """Minimize get_f() from start point x (2x1 ndarray) with the PRP conjugate gradient method.

    Prints every iterate, then the final point and objective value.
    Returns (steps_x1, steps_x2): the x1/x2 coordinates of all iterates,
    for plotting the search trajectory.
    """
    g = get_grad(x, get_f())  # gradient at the current point
    g1 = g                    # "current" gradient; equals g before the first step
    d = -g                    # initial direction: steepest descent
    k = 0
    cnt = 0
    steps_x1 = [x[0][0]]
    steps_x2 = [x[1][0]]
    while get_len_grad(g1) >= 0.00001:  # stop when ||grad|| drops below 1e-5
        # PRP beta from previous gradient g and current gradient g1
        # (presumably get_biet_PRP returns 0 when k == 0 so the first
        # direction stays steepest descent — TODO confirm in helper).
        bt = get_biet_PRP(k, g, g1)
        d = -g1 + bt * d
        step = non_accuracy_search(x, d, get_f())  # Armijo inexact line search
        # step = golden_search(0, 2, x, d, get_f())  # exact golden-section search from experiment 1
        next_x = x + step * d
        steps_x1.append(next_x[0][0])
        steps_x2.append(next_x[1][0])
        k += 1
        g = g1
        g1 = get_grad(next_x, get_f())
        x = next_x
        cnt += 1
        print(cnt, ' ', x, ',', end='\n\n')
    print('\n', '最終結果x*:', x, '\n', 'f(x*):',
          get_f().subs(x1, x[0][0]).subs(x2, x[1][0]))
    return steps_x1, steps_x2


if __name__ == '__main__':
    x0 = np.array([[0], [0]])
    step_x1, step_x2 = do_work(x0)

    fig = plt.figure()
    # NOTE(review): bare Axes3D(fig) is deprecated in modern matplotlib;
    # fig.add_subplot(projection='3d') is the replacement.
    ax = Axes3D(fig)
    xs = np.arange(-3, 3, 0.1)
    ys = np.arange(-3, 3, 0.1)
    X1, X2 = np.meshgrid(xs, ys)
    plt.figure(figsize=(10, 6))  # second figure: the 2-D contour plot
    # BUGFIX: the original plotted an unrelated quartic
    # (X1**4)/4 + (X2**2)/2 - X1*X2 + X1 - X2, so the optimization
    # trajectory did not match the plotted landscape. Plot the same
    # Rosenbrock objective that do_work() actually minimizes.
    Z = 100 * (X2 - X1 ** 2) ** 2 + (1 - X1) ** 2
    step_x1 = np.array(step_x1)
    step_x2 = np.array(step_x2)
    # Objective value at every iterate, for the 3-D trajectory overlay.
    step_x3 = 100 * (step_x2 - step_x1 ** 2) ** 2 + (1 - step_x1) ** 2
    bar = plt.contourf(X1, X2, Z, 5, cmap=plt.cm.Blues)
    plt.plot(step_x1, step_x2, marker='*')
    ax.plot_surface(X1, X2, Z, rstride=1, cstride=1)
    ax.plot(step_x1, step_x2, step_x3, c='r', marker='*')
    plt.colorbar(bar)
    plt.show()
總結
- 上一篇: 如何用python打印田字格_如何用 3
- 下一篇: 数据结构:单链表操作之如何判断链表是否带