/**
 * Fits a multivariate linear regression to (x, y) via batch gradient descent.
 *
 * A bias column of constant 1 is appended internally, so the last entry of the
 * returned weight vector is the intercept. Unlike the original implementation,
 * the caller's rows are copied first and are never mutated.
 *
 * @param {number[][]} x - Input rows; each row is an array of feature values.
 * @param {number[]} y - Target value for each row of x.
 * @param {number} [lr=0.000001] - Learning rate (step size along the gradient).
 * @param {number} [count=50000] - Number of gradient-descent iterations.
 * @returns {number[]} Learned weights; the last entry is the bias (intercept).
 */
function MyGetline(x, y, lr = 0.000001, count = 50000) {
  // Gradient of the mean-squared-error cost with respect to each weight:
  // g[j] = (1/n) * sum_i xs[i][j] * (h[i] - ys[i])
  const gradient = function (xs, h, ys) {
    const g = [];
    for (let j = 0; j < xs[0].length; j++) {
      let c = 0;
      for (let i = 0; i < ys.length; i++) {
        c += xs[i][j] * (h[i] - ys[i]);
      }
      g.push(c / ys.length);
    }
    return g;
  };

  // Linear regression by batch gradient descent. xs/ys are the inputs and
  // targets, rate is the learning rate, iters the number of iterations.
  function Line_Regression(xs, ys, rate, iters) {
    // Copy each row and append a constant 1 so the last weight is the bias.
    // (The original pushed 1 directly onto the caller's rows, mutating them
    // and corrupting the data on any second call.)
    const rows = xs.map((row) => [...row, 1]);
    const w = new Array(rows[0].length).fill(0);
    for (let m = 0; m < iters; m++) {
      // Current predictions z = rows . w
      const z = [];
      for (let i = 0; i < rows.length; i++) {
        let pred = 0;
        for (let j = 0; j < w.length; j++) {
          pred += rows[i][j] * w[j];
        }
        z.push(pred);
      }
      // Step against the gradient.
      const g = gradient(rows, z, ys);
      for (let i = 0; i < w.length; i++) {
        w[i] -= rate * g[i];
      }
    }
    return w;
  }

  return Line_Regression(x, y, lr, count);
}
// Fetch the demo dataset (fetched only to trigger the callback; `data` is
// unused), render 500 generated sample points as a 3D scatter plot, then fit
// a plane to them with gradient-descent linear regression.
// NOTE(review): `$`, `ROOT_PATH`, `option` and `myChart` are globals supplied
// by the surrounding ECharts example page; `option` is intentionally assigned
// without declaration because the host page reads it.
$.get(ROOT_PATH + 'data/asset/data/life-expectancy-table.json', function (data) {
  const symbolSize = 2.5;
  const mydata = [];
  const x = [];
  const y = [];
  // Generate 500 noisy samples of the plane c = 9a + 14b, with noise that
  // grows with i so later points scatter further from the plane.
  for (let i = 0; i < 500; i++) {
    const a = i * Math.random() * 10;
    const b = i * Math.random() * 10;
    const c = 9 * a + 14 * b + (Math.random() - 0.5) * i * 30;
    mydata.push([a, b, c]);
    x.push([a, b]);
    y.push(c);
  }
  console.log(mydata);
  option = {
    grid3D: {},
    xAxis3D: {},
    yAxis3D: {},
    zAxis3D: {},
    dataset: { source: mydata },
    series: [
      {
        type: 'scatter3D',
        symbolSize: symbolSize,
      },
    ],
  };
  myChart.setOption(option);

  // x: array of input feature rows, y: array of target outputs.
  function MyGetline(x, y) {
    // Gradient of the mean-squared-error cost with respect to each weight:
    // g[j] = (1/n) * sum_i xs[i][j] * (h[i] - ys[i])
    const gradient = function (xs, h, ys) {
      const g = [];
      for (let j = 0; j < xs[0].length; j++) {
        let c = 0;
        for (let i = 0; i < ys.length; i++) {
          c += xs[i][j] * (h[i] - ys[i]);
        }
        g.push(c / ys.length);
      }
      return g;
    };

    // Linear regression (the original comment said "logistic", but there is
    // no sigmoid here — this is plain least-squares gradient descent).
    function Line_Regression(xs, ys, lr = 0.0000001, count = 50000) {
      // Copy each row and append a constant 1 so the last weight is the bias
      // (copying avoids mutating the caller's rows, which the original did).
      const rows = xs.map((row) => [...row, 1]);
      const w = new Array(rows[0].length).fill(0);
      for (let m = 0; m < count; m++) {
        // Current predictions z = rows . w
        const z = [];
        for (let i = 0; i < rows.length; i++) {
          let pred = 0;
          for (let j = 0; j < w.length; j++) {
            pred += rows[i][j] * w[j];
          }
          z.push(pred);
        }
        // Step against the gradient.
        const g = gradient(rows, z, ys);
        for (let i = 0; i < w.length; i++) {
          w[i] -= lr * g[i];
        }
      }
      return w;
    }

    // Log the fitted coefficients (expected near [9, 14, 0] for this data).
    const w = Line_Regression(x, y);
    console.log(w);
  }
  MyGetline(x, y);
});