used gofmt -d -s -w *.go
pascal committed Feb 21, 2018
1 parent 5cad074 commit e7de543
Showing 7 changed files with 242 additions and 234 deletions.
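The gofmt invocation in the commit title uses -d to print each rewrite as a diff, -s to apply safe simplifications, and -w to write the formatted result back to the source files in place.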
25 changes: 13 additions & 12 deletions base/GD.go
@@ -1,32 +1,33 @@
 package base
 
 import (
 _ "fmt"
 "math"
 )
 
+// GD contains data for Gradient Descent regressor
 type GD struct {
 RegressorMixin
-Epochs int
-LearningRate, Decay, Tol, Momentum, Alpha, L1_ratio float
-Coefs_ []float
+Epochs int
+LearningRate, Decay, Tol, Momentum, Alpha, L1Ratio float
+Coefs []float
 }
 
+// NewGD create a GD with reasonable defaults
 func NewGD() *GD {
-self := &GD{Epochs: 3000, LearningRate: 1e-3, Decay: .95, Tol: 1e-3, Momentum: .5, L1_ratio: .15}
+self := &GD{Epochs: 3000, LearningRate: 1e-3, Decay: .95, Tol: 1e-3, Momentum: .5}
 self.Predicter = self
 return self
 }
 
+// Fit learns GD Coefs
 // adapted from gdSolver from https://github.com/ohheydom/linearregression/blob/master/linear_regression.go
 // Gradient Descent algorithm.
 
 func (gd *GD) Fit(x [][]float64, y []float64) *GD {
 n, nFeatures := len(x), len(x[0])
 gamma := gd.LearningRate / float(n)
 w := make([]float64, nFeatures+1)
 dw := make([]float64, nFeatures+1)
-gd.Coefs_ = w
+gd.Coefs = w
 errors := make([]float64, n)
 for i := 0; i < gd.Epochs; i++ {
 Shuffle(x, y)
@@ -53,8 +54,8 @@ func (gd *GD) Fit(x [][]float64, y []float64) *GD {
 w[l] += dw[l]
 }
 }
-// L1 : floats.sum(ewize(w,math.Abs));L2:=sum(ewise(w,func(w float)float{return w*w}));R=gd.L1_ratio*L1+(1-gd.L1_ratio*L2)
-// TODO: use L1_ratio
+// L1 : floats.sum(ewize(w,math.Abs));L2:=sum(ewise(w,func(w float)float{return w*w}));R=gd.L1Ratio*L1+(1-gd.L1Ratio*L2)
+// TODO: use L1Ratio
 
 //decrease lr/n
 // TODO learning_rate=optimal eta(t)=1/(alpha*(t0+t))
@@ -65,13 +66,13 @@ func (gd *GD) Fit(x [][]float64, y []float64) *GD {
 break
 }
 }
-gd.Coefs_ = w
+gd.Coefs = w
 return gd
 }
 
-// predY uses the given weights to calculate each sample's label.
+// Predict uses the GD Coefs to calculate each sample's label.
 func (gd *GD) Predict(x [][]float64) []float64 {
-w := gd.Coefs_
+w := gd.Coefs
 n, nFeatures := len(x), len(x[0])
 predY := make([]float64, n)
 for i := 0; i < n; i++ {
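
The TODO left in Fit sketches an elastic-net style penalty that mixes the L1 and L2 norms of the weights via L1Ratio. As a rough, standalone illustration only (not code from this repository; the helper name, the alpha and l1Ratio parameters, and the scikit-learn-style form are assumptions), the term could be computed like this:

package main

import (
	"fmt"
	"math"
)

// elasticNetPenalty is an illustrative sketch of the regularization term the
// TODO comment hints at: alpha * (l1Ratio*Σ|w| + (1-l1Ratio)*Σw²/2).
// The project may end up using a different exact form.
func elasticNetPenalty(w []float64, alpha, l1Ratio float64) float64 {
	var l1, l2 float64
	for _, wi := range w {
		l1 += math.Abs(wi)
		l2 += wi * wi
	}
	return alpha * (l1Ratio*l1 + (1-l1Ratio)*l2/2)
}

func main() {
	w := []float64{0.5, -1.2, 3.0}
	fmt.Printf("penalty: %g\n", elasticNetPenalty(w, 1e-4, 0.15))
}
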
15 changes: 10 additions & 5 deletions base/base.go
@@ -7,20 +7,25 @@ import (
 
 type float = float64
 
+// Predicter is an interface for Predict method
 type Predicter interface {
 Predict([][]float) []float
 }
 
+// RegressorMixin is a base for predicters. provides a Score(X,w,weights) method
 type RegressorMixin struct{ Predicter }
 
-func (self *RegressorMixin) Score(X [][]float, y, sample_weight []float) float {
-y_pred := self.Predict(X)
-return metrics.R2Score(y, y_pred, sample_weight, "variance_weighted")
+// Score returns R2Score of predicter
+func (predicter *RegressorMixin) Score(X [][]float, y, sampleWeight []float) float {
+yPred := predicter.Predict(X)
+return metrics.R2Score(y, yPred, sampleWeight, "variance_weighted")
 }
 
+// Shuffle shuffles X,y samples
 func Shuffle(X [][]float, y []float) {
-n_samples := len(X)
+nSamples := len(X)
 for i := range X {
-j := i + rand.Intn(n_samples-i)
+j := i + rand.Intn(nSamples-i)
 X[i], X[j] = X[j], X[i]
 if y != nil {
 y[i], y[j] = y[j], y[i]
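
Score above delegates to metrics.R2Score with the "variance_weighted" option. The following is a minimal standalone sketch of a weighted coefficient of determination under the standard definition R² = 1 - Σwᵢ(yᵢ-ŷᵢ)² / Σwᵢ(yᵢ-ȳ)²; it is not the project's metrics implementation and ignores the multioutput averaging that "variance_weighted" implies:

package main

import "fmt"

// r2Score is an illustrative, standalone take on the weighted R² that
// RegressorMixin.Score delegates to. A nil sampleWeight means every sample
// has weight 1. ȳ is the weighted mean of yTrue.
func r2Score(yTrue, yPred, sampleWeight []float64) float64 {
	w := sampleWeight
	if w == nil {
		w = make([]float64, len(yTrue))
		for i := range w {
			w[i] = 1
		}
	}
	var wSum, yMean float64
	for i, yi := range yTrue {
		wSum += w[i]
		yMean += w[i] * yi
	}
	yMean /= wSum
	var ssRes, ssTot float64
	for i, yi := range yTrue {
		d := yi - yPred[i]
		ssRes += w[i] * d * d
		m := yi - yMean
		ssTot += w[i] * m * m
	}
	return 1 - ssRes/ssTot
}

func main() {
	y := []float64{3, -0.5, 2, 7}
	yHat := []float64{2.5, 0.0, 2, 8}
	fmt.Printf("%.4f\n", r2Score(y, yHat, nil)) // prints 0.9486
}
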
2 changes: 2 additions & 0 deletions linear_model/Base.go
@@ -522,3 +522,5 @@ func preprocess_data(X [][]float, y []float, fit_intercept bool, normalize bool)
 return
 
 }
+
+func unused(...interface{}) {}
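
The variadic no-op added above is presumably there so that work-in-progress code can keep otherwise-unreferenced values around; that reading of intent is an assumption, but the mechanics would look like this self-contained sketch:

package main

import "fmt"

// unused mirrors the helper added in this commit: a variadic no-op.
// Passing a local variable to it counts as a use, so Go's
// "declared and not used" compile error does not fire.
func unused(...interface{}) {}

func main() {
	debugOnly := 42 // kept around for later work
	unused(debugOnly)
	fmt.Println("compiles even though debugOnly is not otherwise used")
}
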