package neuralnetworkbase

import (
	"math" // used by the sigmoid helpers sketched at the end of this file
	"math/rand"
	"time"

	"gonum.org/v1/gonum/mat"
)

// NeuralNetwork is a simple fully connected feed-forward network with
// sigmoid activations, trained by stochastic gradient descent.
type NeuralNetwork struct {
	Count          int          // number of layers, including input and output
	Sizes          []int        // neurons per layer
	Biases         []*mat.Dense // Biases[i] is a Sizes[i]x1 column vector (unused for i == 0)
	Weights        []*mat.Dense // Weights[i] is a Sizes[i]xSizes[i-1] matrix (unused for i == 0)
	A              []*mat.Dense // activations per layer from the last Forward pass
	Z              []*mat.Dense // weighted inputs per layer from the last Forward pass
	alpha          float64      // learning rate, scaled by the size of the input layer
	trainingCycles int          // number of passes over the training set
}

// Result returns the activations of the output layer from the last Forward pass.
func (nn *NeuralNetwork) Result() *mat.Dense {
	return nn.A[nn.Count-1]
}

// NewNeuralNetwork allocates a network with the given layer sizes, learning
// rate nu and number of training cycles. Weights and biases of every layer
// except the input layer are initialized with random values.
func NewNeuralNetwork(sizes []int, nu float64, trainingCycles int) (nn *NeuralNetwork) {
	nn = &NeuralNetwork{}
	nn.Sizes = sizes
	nn.Count = len(sizes)
	nn.Weights = make([]*mat.Dense, nn.Count)
	nn.Biases = make([]*mat.Dense, nn.Count)
	nn.A = make([]*mat.Dense, nn.Count)
	nn.Z = make([]*mat.Dense, nn.Count)
	nn.alpha = nu / float64(nn.Sizes[0])
	nn.trainingCycles = trainingCycles

	// Layer 0 is the input layer and has no weights or biases.
	for i := 1; i < nn.Count; i++ {
		nn.Weights[i] = generateRandomDense(nn.Sizes[i], nn.Sizes[i-1])
		nn.Biases[i] = generateRandomDense(nn.Sizes[i], 1)
	}
	return
}

// Predict runs a forward pass for the input column vector aIn and returns the
// index and value of the strongest output activation.
func (nn *NeuralNetwork) Predict(aIn mat.Matrix) (maxIndex int, max float64) {
	nn.Forward(aIn)
	result := nn.Result()
	r, _ := result.Dims()
	max = result.At(0, 0)
	maxIndex = 0
	for i := 1; i < r; i++ {
		if result.At(i, 0) > max {
			max = result.At(i, 0)
			maxIndex = i
		}
	}
	return
}

// Train runs trainingCycles passes of backpropagation over the data set.
// dataSet[j] is an input column vector and expect[j] the corresponding
// expected output column vector.
func (nn *NeuralNetwork) Train(dataSet, expect []*mat.Dense) {
	rand.Seed(time.Now().UnixNano())
	dataSetSize := len(dataSet)
	// randomIndex := rand.Int() % dataSetSize
	for i := 0; i < nn.trainingCycles; i++ {
		// Walk the data set backwards with a stride of 3, so each cycle
		// only touches every third sample.
		for j := dataSetSize - 1; j >= 0; j -= 3 {
			nn.Backward(dataSet[j], expect[j])
		}
		// Optional early stop once a randomly picked sample is classified
		// with high confidence:
		// _, max := nn.Predict(dataSet[randomIndex])
		// if 1.0-max < 0.2 {
		// 	break
		// }
	}
}

// SaveState is meant to persist the trained weights and biases to filename.
// Not implemented yet.
func (nn *NeuralNetwork) SaveState(filename string) {
}

// LoadState is meant to restore weights and biases previously written by
// SaveState. Not implemented yet.
func (nn *NeuralNetwork) LoadState(filename string) {
}

// Forward propagates the input column vector aIn through the network,
// storing the weighted inputs in Z and the activations in A.
func (nn *NeuralNetwork) Forward(aIn mat.Matrix) {
	nn.A[0] = mat.DenseCopyOf(aIn)
	for i := 1; i < nn.Count; i++ {
		nn.A[i] = mat.NewDense(nn.Sizes[i], 1, nil)
		aSrc := nn.A[i-1]
		aDst := nn.A[i]

		// z[i] = W[i]*a[i-1] + b[i]; a[i] = sigmoid(z[i])
		aDst.Mul(nn.Weights[i], aSrc)
		aDst.Add(aDst, nn.Biases[i])
		nn.Z[i] = mat.DenseCopyOf(aDst)
		aDst.Apply(applySigmoid, aDst)
	}
}

// Backward runs a forward pass for aIn and then backpropagates the error
// against the expected output aOut, updating all weights and biases by one
// gradient-descent step.
func (nn *NeuralNetwork) Backward(aIn, aOut mat.Matrix) {
	nn.Forward(aIn)

	lastLayerNum := nn.Count - 1

	// Error of the output layer: delta = (a - y) ⊙ sigmoid'(z).
	err := &mat.Dense{}
	err.Sub(nn.Result(), aOut)

	sigmoidsPrime := &mat.Dense{}
	sigmoidsPrime.Apply(applySigmoidPrime, nn.Z[lastLayerNum])

	delta := &mat.Dense{}
	delta.MulElem(err, sigmoidsPrime)

	// Gradients for the output layer: dB = delta, dW = delta * a[l-1]^T.
	biases := mat.DenseCopyOf(delta)

	weights := &mat.Dense{}
	weights.Mul(delta, nn.A[lastLayerNum-1].T())

	newBiases := []*mat.Dense{makeBackGradien(biases, nn.Biases[lastLayerNum], nn.alpha)}
	newWeights := []*mat.Dense{makeBackGradien(weights, nn.Weights[lastLayerNum], nn.alpha)}

	// Propagate the error backwards through the hidden layers:
	// delta[i] = (W[i+1]^T * delta[i+1]) ⊙ sigmoid'(z[i]).
	err = delta
	for i := nn.Count - 2; i > 0; i-- {
		sigmoidsPrime := &mat.Dense{}
		sigmoidsPrime.Apply(applySigmoidPrime, nn.Z[i])

		delta := &mat.Dense{}
		wdelta := &mat.Dense{}
		wdelta.Mul(nn.Weights[i+1].T(), err)
		delta.MulElem(wdelta, sigmoidsPrime)
		err = delta

		biases := mat.DenseCopyOf(delta)

		weights := &mat.Dense{}
		weights.Mul(delta, nn.A[i-1].T())

		// Prepend so the slices stay ordered by layer index.
		newBiases = append([]*mat.Dense{makeBackGradien(biases, nn.Biases[i], nn.alpha)}, newBiases...)
		newWeights = append([]*mat.Dense{makeBackGradien(weights, nn.Weights[i], nn.alpha)}, newWeights...)
	}

	// Layer 0 (the input layer) has no weights or biases; keep an empty
	// placeholder so the slice indices line up with the layer numbers.
	newBiases = append([]*mat.Dense{&mat.Dense{}}, newBiases...)
	newWeights = append([]*mat.Dense{&mat.Dense{}}, newWeights...)

	nn.Biases = newBiases
	nn.Weights = newWeights
}

// makeBackGradien applies a single gradient-descent step: it returns
// actual - alpha*in, where in is the gradient of the cost with respect to
// the corresponding weights or biases.
func makeBackGradien(in mat.Matrix, actual mat.Matrix, alpha float64) *mat.Dense {
	scaled := &mat.Dense{}
	result := &mat.Dense{}
	scaled.Scale(alpha, in)
	result.Sub(actual, scaled)
	return result
}
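
// The three helpers below (generateRandomDense, applySigmoid,
// applySigmoidPrime) are referenced above but not defined in this file. If
// they are not provided elsewhere in the package, the following is a minimal
// sketch under the usual assumptions: sigmoid activations and weights drawn
// uniformly from [0, 1) via the package-level rand source. It relies on the
// "math" import included above.

// generateRandomDense returns an r x c matrix filled with random values.
func generateRandomDense(r, c int) *mat.Dense {
	data := make([]float64, r*c)
	for i := range data {
		data[i] = rand.Float64()
	}
	return mat.NewDense(r, c, data)
}

// applySigmoid is an Apply callback computing the logistic function.
func applySigmoid(_, _ int, z float64) float64 {
	return 1.0 / (1.0 + math.Exp(-z))
}

// applySigmoidPrime is an Apply callback computing the derivative of the
// logistic function, expressed in terms of the raw weighted input z.
func applySigmoidPrime(_, _ int, z float64) float64 {
	s := 1.0 / (1.0 + math.Exp(-z))
	return s * (1.0 - s)
}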
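
// Typical usage of this package might look like the sketch below. The 4-3-2
// layer layout, the learning rate of 0.5 and the 100 training cycles are
// arbitrary illustrative values, not anything the package prescribes; inputs
// and expected outputs are column vectors sized to the first and last layer.
//
//	inputs := []*mat.Dense{mat.NewDense(4, 1, []float64{0.1, 0.9, 0.2, 0.8})}
//	labels := []*mat.Dense{mat.NewDense(2, 1, []float64{1, 0})}
//	nn := NewNeuralNetwork([]int{4, 3, 2}, 0.5, 100)
//	nn.Train(inputs, labels)
//	class, activation := nn.Predict(inputs[0])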