Linear Regression in Two Dimensions


```python
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 30 15:27:51 2017

@author: liuxy
"""
import numpy as np
import matplotlib.pyplot as plt


def gen_data(size):
    # Note: x1 and x2 are identical here, so y = 3*x1 + noise and any
    # weights with w1 + w2 = 3 fit the data equally well.
    x1 = np.arange(0, size, 1)
    x2 = np.arange(0, size, 1)
    e = np.random.normal(0, 3, size)
    y = 2 * x1 + x2 + e
    return np.array([x1, x2, y])


def compute_gradient_full(data, w):
    # Full-batch gradient of the mean squared error
    # L(w) = mean((X1*w1 + X2*w2 - Y)**2) over the whole data set.
    X1, X2, Y = data[0], data[1], data[2]
    N = len(Y)
    residual = X1 * w[0] + X2 * w[1] - Y
    g1 = np.sum(2 * X1 * residual) / N
    g2 = np.sum(2 * X2 * residual) / N
    return np.array([g1, g2])


def compute_gradient_sgd(data, w):
    # Stochastic gradient: the same formula evaluated on one random sample.
    X1, X2, Y = data[0], data[1], data[2]
    idx = np.random.randint(0, len(Y))  # randint's upper bound is exclusive
    x1, x2, y = X1[idx], X2[idx], Y[idx]
    residual = x1 * w[0] + x2 * w[1] - y
    g1 = 2 * x1 * residual
    g2 = 2 * x2 * residual
    return np.array([g1, g2])


def Optimizer(data, learning_rate, num_iterator, Wts, method):
    # Plain gradient descent loop; records every iterate in Wts.
    w = Wts[0]
    for i in range(num_iterator):
        if method == 'full':
            g = compute_gradient_full(data, w)
        if method == 'sgd':
            g = compute_gradient_sgd(data, w)
        w = w - learning_rate * g
        Wts.append(w)


data = gen_data(100)
lr = 0.00005
w = [80, 59]
num = 100
Weights_full = []
Weights_full.append(w)
Optimizer(data, lr, num, Weights_full, 'sgd')

# Evaluate the loss on a grid of (w1, w2) pairs for the contour plot.
w1 = np.arange(0, 100, 0.2)
w2 = np.arange(0, 100, 0.2)
W1, W2 = np.meshgrid(w1, w2)
X1, X2, Y = data[0], data[1], data[2]
Z = []
for i in range(len(w1)):
    ls = []
    for j in range(len(w2)):
        L = np.sum((X1 * w1[i] + X2 * w2[j] - Y) ** 2) / len(Y)
        ls.append(L)
    Z.append(ls)
# Transpose so rows index w2 and columns index w1, matching meshgrid's layout.
Zs = np.array(Z).T

# Overlay the optimization trajectory on the loss contours.
plt.contour(W1, W2, Zs)
WWW = np.array(Weights_full)
plt.scatter(WWW[:, 0], WWW[:, 1])
plt.plot(WWW[:, 0], WWW[:, 1])
plt.show()
```
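For reference, the loss plotted in the contour and the gradient the code descends are written out below. This derivation is not in the original post; it is added to make the `compute_gradient_full` formula explicit:

$$L(w_1, w_2) = \frac{1}{N}\sum_{i=1}^{N}\left(x_{1,i} w_1 + x_{2,i} w_2 - y_i\right)^2$$

$$\frac{\partial L}{\partial w_1} = \frac{2}{N}\sum_{i=1}^{N} x_{1,i}\left(x_{1,i} w_1 + x_{2,i} w_2 - y_i\right), \qquad \frac{\partial L}{\partial w_2} = \frac{2}{N}\sum_{i=1}^{N} x_{2,i}\left(x_{1,i} w_1 + x_{2,i} w_2 - y_i\right)$$

The SGD variant uses the same per-sample expression without the sum, so each step follows a noisy estimate of the full gradient.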

The figures above show the trajectories for full batch and SGD, respectively. Note that the script as written only runs the 'sgd' method; to reproduce the full-batch figure, pass 'full' to Optimizer instead.
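To generate both figures in one run, a minimal sketch is shown below. It reuses data, lr, num, Optimizer, and the contour grid (W1, W2, Zs) defined above; the side-by-side subplot layout is an assumption, not part of the original post:

```python
# Sketch: run both optimizers from the same start and compare trajectories.
# Assumes the script above has already run (data, lr, num, W1, W2, Zs exist).
trajectories = {}
for method in ('full', 'sgd'):
    Wts = [np.array([80.0, 59.0])]       # same starting point for both runs
    Optimizer(data, lr, num, Wts, method)
    trajectories[method] = np.array(Wts)

fig, axes = plt.subplots(1, 2, figsize=(10, 4))
for ax, method in zip(axes, ('full', 'sgd')):
    ax.contour(W1, W2, Zs)               # loss contours as background
    path = trajectories[method]
    ax.plot(path[:, 0], path[:, 1], marker='o', markersize=2)
    ax.set_title(method)
    ax.set_xlabel('w1')
    ax.set_ylabel('w2')
plt.tight_layout()
plt.show()
```

The full-batch path should descend smoothly, while the SGD path jitters around the same general direction because each step uses a single-sample gradient estimate.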

Please credit the original source when reposting: https://www.6miu.com/read-35060.html
