Machine Learning with Sklearn in Practice: Hand-Written Linear Regression
Hand-Written Linear Regression
```python
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression

X = np.linspace(2, 10, 20).reshape(-1, 1)
# random slope and intercept for the target line f(x) = w*x + b
y = np.random.randint(1, 6, size=1)*X + np.random.randint(-5, 5, size=1)
# add noise ("salting" the data)
y += np.random.randn(20, 1)*0.8
plt.scatter(X, y, color="red")

# fit sklearn's LinearRegression to recover w and b
lr = LinearRegression()
lr.fit(X, y)
w = lr.coef_[0, 0]
b = lr.intercept_[0]
print(w, b)

plt.scatter(X, y)
x = np.linspace(1, 11, 50)
plt.plot(x, w*x + b, color="green")
```

Result:

```
2.995391527138711 1.9801931425932864
```
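Since the slope and intercept are drawn with np.random.randint, each run produces a different line, and the printed values will only match the result above by chance. A minimal sketch, assuming you want repeatable output (the seed value is my own choice, not from the original):

```python
# Fixing NumPy's global seed before generating the data makes the
# random slope, intercept, and noise identical on every run.
np.random.seed(42)  # hypothetical seed value, any fixed integer works
```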
```python
# Use gradient descent to solve the one-variable linear problem for w and b
class LinearModel(object):
    def __init__(self):
        self.w = np.random.randn(1)[0]
        self.b = np.random.randn(1)[0]

    # Model: express the relationship between X and the target as f(x) = w*x + b
    def model(self, x):
        return self.w*x + self.b

    def loss(self, x, y):
        # least squares
        cost = (y - self.model(x))**2
        # the gradient is the partial derivative with respect to each unknown: w, b
        gradient_w = 2*(y - self.model(x))*(-x)
        gradient_b = 2*(y - self.model(x))*(-1)
        return cost, gradient_w, gradient_b

    # gradient descent: update w and b
    def gradient_descent(self, gradient_w, gradient_b, learning_rate=0.1):
        self.w -= gradient_w*learning_rate
        self.b -= gradient_b*learning_rate

    # training (fit)
    def fit(self, X, y):
        count = 0  # stop after 3000 optimization steps
        tol = 0.0001
        last_w = self.w + 0.1
        last_b = self.b + 0.1
        length = len(X)
        while True:
            if count > 3000:  # iteration budget used up
                break
            # stop once the slope and intercept are precise enough
            if (abs(last_w - self.w) < tol) and (abs(last_b - self.b) < tol):
                break
            cost = 0
            gradient_w = 0
            gradient_b = 0
            for i in range(length):
                cost_, gradient_w_, gradient_b_ = self.loss(X[i, 0], y[i, 0])
                cost += cost_/length
                gradient_w += gradient_w_/length
                gradient_b += gradient_b_/length
            print('--------------------- iteration: %d, loss: %0.2f' % (count, cost))
            last_w = self.w
            last_b = self.b
            # update the intercept and slope
            self.gradient_descent(gradient_w, gradient_b, 0.01)
            count += 1

    def result(self):
        return self.w, self.b

lm = LinearModel()
lm.fit(X, y)
lm.result()
```

Result:
```
(2.9489680632625297, 2.2698211503362224)
```

```python
plt.scatter(X, y, color="red")
plt.plot(x, 2.94896*x + 2.2698211, color="green")
plt.rcParams['font.sans-serif'] = ['Arial Unicode MS']
plt.title("Fit from the hand-written algorithm")
```
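As a cross-check on the hand-written gradient descent, this one-feature problem also has a closed-form least-squares solution. A minimal sketch of the normal equation, assuming the X and y arrays from the first block (this comparison is my addition, not the author's code):

```python
# Normal equation: theta = (A^T A)^(-1) A^T y, where A is X with a bias column.
A = np.concatenate([X, np.ones_like(X)], axis=1)   # shape (20, 2)
theta = np.linalg.inv(A.T.dot(A)).dot(A.T).dot(y)
w_closed, b_closed = theta[0, 0], theta[1, 0]
print(w_closed, b_closed)  # should closely match both sklearn and LinearModel
```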
Two-Variable Linear Fit

```python
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression

# f(x) = w1*x**2 + w2*x + b        # quadratic in one variable
# f(x1, x2) = w1*x1 + w2*x2 + b    # linear in two variables
X = np.linspace(0, 10, num=50).reshape(-1, 1)
X = np.concatenate([X**2, X], axis=1)
X.shape

w = np.random.randint(1, 10, size=2)
b = np.random.randint(-5, 5, size=1)

# matrix multiplication
y = X.dot(w) + b

plt.plot(X[:, 1], y, c="r")
plt.title("w1:%d.w2:%d.w3:%d" % (w[0], w[1], b[0]))
```

Result:
```
Text(0.5, 1.0, 'w1:7.w2:1.w3:-5')
```
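The squared column above is concatenated by hand; sklearn can generate polynomial features as well. A sketch using PolynomialFeatures (my substitution for the manual concatenate, not the author's code):

```python
from sklearn.preprocessing import PolynomialFeatures

x_raw = np.linspace(0, 10, num=50).reshape(-1, 1)
poly = PolynomialFeatures(degree=2, include_bias=False)
# note: the generated column order is [x, x**2], the reverse of the
# manual np.concatenate([X**2, X], axis=1) used above
X_poly = poly.fit_transform(x_raw)
print(X_poly.shape)  # (50, 2)
```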
```python
# use sklearn's built-in estimator to fit the same data
from sklearn.linear_model import LinearRegression

lr = LinearRegression()
lr.fit(X, y)
print(lr.coef_, lr.intercept_)

plt.scatter(X[:, 1], y, marker="*")
x = np.linspace(-2, 12, 100)
# plot using the learned coefficients: w1 = 7, w2 = 1, b ~ -4.99
plt.plot(x, 7*x**2 + 1*x + -4.99, c="g")
```

Result:
```
[7. 1.] -4.999999999999972
```
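The plot above hard-codes the learned coefficients (7, 1, -4.99). A sketch that asks the fitted model for its predictions instead, assuming the lr estimator and the x grid from the block above (my addition):

```python
# Rebuild the two-column feature matrix [x**2, x] that lr was trained on,
# then let the model draw its own curve.
X_line = np.concatenate([(x**2).reshape(-1, 1), x.reshape(-1, 1)], axis=1)
plt.plot(x, lr.predict(X_line), c="g")
```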
Hand-Written Linear Regression for Multiple Features (a Multivariate Equation)
```python
# epoch: how many passes of gradient descent to run
def gradient_descent(X, y, lr, epoch, w, b):
    # batch: how many samples per pass
    batch = len(X)
    for i in range(epoch):
        # d_loss: gradient of the loss
        d_loss = 0
        # gradients of the weights
        dw = [0 for _ in range(len(w))]
        # gradient of the intercept
        db = 0
        for j in range(batch):
            y_ = 0  # prediction: y_ = f(x) = w1*x1 + w2*x2 + b
            for n in range(len(w)):
                y_ += X[j][n]*w[n]
            y_ += b
            # (y - y_)**2  ----->  derivative 2*(y - y_)*(-1)
            # (y_ - y)**2  ----->  derivative 2*(y_ - y)*(1)
            d_loss = -(y[j] - y_)
            for n in range(len(w)):
                dw[n] += X[j][n]*d_loss/float(batch)
            db += 1*d_loss/float(batch)
        # gradient-descent update of the weights and intercept
        for n in range(len(w)):
            w[n] -= dw[n]*lr[n]
        b -= db*lr[0]
    return w, b

# note: lr here is a list of per-feature learning rates
# (it shadows the LinearRegression instance above)
lr = [0.0001, 0.001]
w = np.random.randn(2)
b = np.random.randn(1)[0]
gradient_descent(X, y, lr, 500, w, b)
```

Result:
```
(array([ 7.18689265, -1.25846592]), 0.6693960269813103)
```

```python
plt.scatter(X[:, 1], y, marker="*")
plt.plot(x, 7.1868*x**2 - 1.2584*x + 0.6694, c="g")
```
Further optimization is still possible: the fitted coefficients (7.19, -1.26, 0.67) are some distance from the true values (7, 1, -5), so more epochs or better-tuned learning rates would likely tighten the fit.
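One way to continue optimizing, sketched under my own assumptions rather than taken from the original: replace the per-sample Python loops with NumPy matrix operations. The update rule is identical, but vectorization makes it cheap to run far more epochs:

```python
def gradient_descent_vec(X, y, lr, epoch, w, b):
    """Vectorized batch gradient descent for y ~ X.dot(w) + b."""
    batch = len(X)
    lr = np.asarray(lr)                 # per-feature learning rates
    for _ in range(epoch):
        y_pred = X.dot(w) + b           # predictions for the whole batch
        d_loss = y_pred - y             # shape (batch,)
        dw = X.T.dot(d_loss) / batch    # average gradient per feature
        db = d_loss.sum() / batch       # average intercept gradient
        w -= dw * lr
        b -= db * lr[0]
    return w, b

# assumes the X and y built in the two-variable section above
w = np.random.randn(2)
b = np.random.randn(1)[0]
gradient_descent_vec(X, y, [0.0001, 0.001], 500, w, b)
```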