深入浅出Python机器学习: 4.3.1 The Principle of Ridge Regression / 4.3.2 Tuning the Ridge Regression Parameters
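Before the code, a short note on the principle behind section 4.3.1: ridge regression is least squares with an L2 penalty on the coefficient vector, controlled by the parameter alpha. A standard way to write the objective (the textbook definition, not quoted from the book):

\min_{w,\,b} \; \sum_{i=1}^{n} \bigl( y_i - w^{\top} x_i - b \bigr)^2 + \alpha \, \lVert w \rVert_2^2

A larger alpha puts more weight on keeping the coefficients small; alpha = 0 recovers ordinary linear regression.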
# Add this line so plots display inline after restarting the notebook kernel
%matplotlib inline
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.datasets import load_diabetes

# Fit an ordinary linear regression on the diabetes dataset as a baseline
X1, y1 = load_diabetes().data, load_diabetes().target
X1_train, X1_test, y1_train, y1_test = train_test_split(X1, y1, random_state=8)
lr = LinearRegression().fit(X1_train, y1_train)
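To compare this baseline with the ridge models below, we can also print its scores. This small addition continues from the cell above and is not part of the original output:

# R^2 scores of the unregularized baseline on the same train/test split
print('Linear regression training set score: {:.2f}'.format(lr.score(X1_train, y1_train)))
print('Linear regression test set score: {:.2f}'.format(lr.score(X1_test, y1_test)))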
from sklearn.model_selection import train_test_split
from sklearn.linear_model import Ridge
from sklearn.datasets import load_diabetes

# Fit ridge regression with the default alpha=1.0
X, y = load_diabetes().data, load_diabetes().target
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=8)
ridge = Ridge().fit(X_train, y_train)
print('Ridge regression training set score: {:.2f}'.format(ridge.score(X_train, y_train)))
print('Ridge regression test set score: {:.2f}'.format(ridge.score(X_test, y_test)))
Ridge regression training set score: 0.43
Ridge regression test set score: 0.43
# Increase regularization: alpha=10 shrinks the coefficients more aggressively
ridge10 = Ridge(alpha=10).fit(X_train, y_train)
print('Ridge regression training set score: {:.2f}'.format(ridge10.score(X_train, y_train)))
print('Ridge regression test set score: {:.2f}'.format(ridge10.score(X_test, y_test)))
Ridge regression training set score: 0.15
Ridge regression test set score: 0.16
# Decrease regularization: alpha=0.1 moves the model closer to plain linear regression
ridge01 = Ridge(alpha=0.1).fit(X_train, y_train)
print('Ridge regression training set score: {:.2f}'.format(ridge01.score(X_train, y_train)))
print('Ridge regression test set score: {:.2f}'.format(ridge01.score(X_test, y_test)))
Ridge regression training set score: 0.52
Ridge regression test set score: 0.47
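Larger alpha values pull the whole coefficient vector toward zero. As a quick check (an extra sketch continuing from the cells above, not from the book), the L2 norm of coef_ should fall as alpha grows:

import numpy as np

# Compare coefficient magnitudes across the three alpha settings
for name, model in [('alpha=0.1', ridge01), ('alpha=1', ridge), ('alpha=10', ridge10)]:
    print('{}: coefficient L2 norm = {:.2f}'.format(name, np.linalg.norm(model.coef_)))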
# Manually search for the alpha at which the (rounded) training score
# no longer exceeds the test score, stepping alpha up by 0.01 each time
ridge01_2 = Ridge(alpha=0.1).fit(X_train, y_train)
print('Initial scores: {} {}'.format(round(ridge01.score(X_train, y_train), 4),
                                     round(ridge01.score(X_test, y_test), 4)))
print('*' * 50)
i = 0.1
while round(ridge01_2.score(X_train, y_train), 4) > round(ridge01_2.score(X_test, y_test), 4):
    i += 0.01
    ridge01_2 = Ridge(alpha=i).fit(X_train, y_train)
    if ridge01_2.score(X_train, y_train) <= ridge01_2.score(X_test, y_test):
        print(i)
        print('Ridge regression training set score: {:.4f}'.format(ridge01_2.score(X_train, y_train)))
        print('Ridge regression test set score: {:.4f}'.format(ridge01_2.score(X_test, y_test)))
        print('*' * 50)
        break
Initial scores: 0.5216 0.4734
**************************************************
1.0100000000000007
Ridge regression training set score: 0.4317
Ridge regression test set score: 0.4318
**************************************************
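In practice, alpha is usually chosen by cross-validation on the training data rather than by comparing training and test scores directly. A minimal sketch using scikit-learn's RidgeCV (not part of the book's code; the alpha grid below is an arbitrary example):

import numpy as np
from sklearn.linear_model import RidgeCV

# RidgeCV evaluates each candidate alpha by (generalized) cross-validation
# and keeps the best one
ridge_cv = RidgeCV(alphas=np.logspace(-3, 2, 50)).fit(X_train, y_train)
print('best alpha:', ridge_cv.alpha_)
print('test set score: {:.2f}'.format(ridge_cv.score(X_test, y_test)))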
import matplotlib.pyplot as plt

# Plot each model's coefficients against their index
plt.plot(ridge.coef_, 's', label='Ridge alpha=1')
plt.plot(ridge10.coef_, '^', label='Ridge alpha=10')
plt.plot(ridge01.coef_, 'v', label='Ridge alpha=0.1')
plt.plot(lr.coef_, 'o', label='linear regression')

# Print lr.coef_ and its type
print('lr.coef_:\n{}'.format(lr.coef_))
print('type of lr.coef_:\n{}'.format(type(lr.coef_)))

plt.xlabel('coefficient index')
plt.ylabel('coefficient magnitude')
plt.hlines(0, 0, len(lr.coef_))  # horizontal reference line at 0
plt.legend()                     # place a legend on the axes

# Inspect the target vector and the feature matrix
print(y)
print(len(y))
print(y.shape)
print('*' * 50)
print(X)
print(len(X))
print(X.shape)
lr.coef_:
[   11.5106203   -282.51347161   534.20455671   401.73142674
 -1043.89718398   634.92464089   186.43262636   204.93373199
   762.47149733    91.9460394 ]
type of lr.coef_:
<class 'numpy.ndarray'>
[151.  75. 141. 206. 135.  97. 138.  63. 110. 310. 101.  69. 179. 185. ... 178. 104. 132. 220.  57.]
442
(442,)
**************************************************
[[ 0.03807591  0.05068012  0.06169621 ... -0.00259226  0.01990842
  -0.01764613]
 [-0.00188202 -0.04464164 -0.05147406 ... -0.03949338 -0.06832974
  -0.09220405]
 [ 0.08529891  0.05068012  0.04445121 ... -0.00259226  0.00286377
  -0.02593034]
 ...
 [ 0.04170844  0.05068012 -0.01590626 ... -0.01107952 -0.04687948
   0.01549073]
 [-0.04547248 -0.04464164  0.03906215 ...  0.02655962  0.04452837
  -0.02593034]
 [-0.04547248 -0.04464164 -0.0730303  ... -0.03949338 -0.00421986
   0.00306441]]
442
(442, 10)
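As an aside, the same (442, 10) feature matrix and (442,) target vector can be loaded in a single call with the return_X_y flag, assuming a reasonably recent scikit-learn version:

from sklearn.datasets import load_diabetes

# return_X_y=True returns (data, target) directly instead of a Bunch object
X, y = load_diabetes(return_X_y=True)
print(X.shape, y.shape)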