瀏覽代碼

Add back propagation

Alexey Edelev 5 年之前
父節點
當前提交
a7923fbcbf
共有 3 個文件被更改,包括 64 次插入和 2 次刪除
  1. 3 1
      neuralnetwork/main.go
  2. 8 0
      neuralnetwork/neuralnetworkbase/common.go
  3. 53 1
      neuralnetwork/neuralnetworkbase/neuralnetwork.go

+ 3 - 1
neuralnetwork/main.go

@@ -18,7 +18,7 @@ func main() {
 	}
 	aIn := mat.NewDense(sizes[0], 1, data)
 
-	nn.Forward(aIn)
+	max, index := nn.Predict(aIn)
 
 	for i := 0; i < nn.Count; i++ {
 		if i > 0 {
@@ -28,4 +28,6 @@ func main() {
 		}
 		fmt.Printf("A:\n%v\n\n", mat.Formatted(nn.A[i], mat.Prefix(""), mat.Excerpt(0)))
 	}
+
+	fmt.Printf("Resul: %v, %v\n\n", index, max)
 }

+ 8 - 0
neuralnetwork/neuralnetworkbase/common.go

@@ -19,6 +19,14 @@ func applySigmoid(_, _ int, x float64) float64 {
 	return sigmoid(x)
 }
 
// applySigmoidPrime adapts sigmoidPrime to the (i, j, v) element-wise
// signature expected by mat.Dense.Apply; the row and column indices
// are ignored.
func applySigmoidPrime(_, _ int, x float64) float64 {
	return sigmoidPrime(x)
}
+
 func sigmoid(x float64) float64 {
 	return 1.0 / (1.0 + math.Exp(-x))
 }
+
+func sigmoidPrime(x float64) float64 {
+	return sigmoid(x) * (1 - sigmoid(x))
+}

+ 53 - 1
neuralnetwork/neuralnetworkbase/neuralnetwork.go

@@ -11,9 +11,10 @@ type NeuralNetwork struct {
 	Weights []*mat.Dense
 	A       []*mat.Dense
 	Z       []*mat.Dense
+	alpha   float64
 }
 
// Result returns the activation column of the output (last) layer,
// as produced by the most recent Forward pass.
func (nn *NeuralNetwork) Result() *mat.Dense {
	return nn.A[nn.Count-1]
}
 
@@ -25,6 +26,8 @@ func NewNeuralNetwork(Sizes []int) (nn *NeuralNetwork) {
 	nn.Biases = make([]*mat.Dense, nn.Count)
 	nn.A = make([]*mat.Dense, nn.Count)
 	nn.Z = make([]*mat.Dense, nn.Count)
+	nn.alpha = 0.2 / float64(nn.Sizes[0])
+
 	for i := 1; i < nn.Count; i++ {
 		nn.Weights[i] = generateRandomDense(nn.Sizes[i], nn.Sizes[i-1])
 		nn.Biases[i] = generateRandomDense(nn.Sizes[i], 1)
@@ -32,6 +35,21 @@ func NewNeuralNetwork(Sizes []int) (nn *NeuralNetwork) {
 	return
 }
 
+func (nn *NeuralNetwork) Predict(aIn mat.Matrix) (maxIndex int, max float64) {
+	nn.Forward(aIn)
+	result := nn.Result()
+	r, _ := result.Dims()
+	max = 0.0
+	maxIndex = 0
+	for i := 0; i < r; i++ {
+		if result.At(i, 0) > max {
+			max = result.At(i, 0)
+			maxIndex = i
+		}
+	}
+	return
+}
+
 func (nn *NeuralNetwork) Forward(aIn mat.Matrix) {
 	nn.A[0] = mat.DenseCopyOf(aIn)
 
@@ -45,3 +63,37 @@ func (nn *NeuralNetwork) Forward(aIn mat.Matrix) {
 		aDst.Apply(applySigmoid, aDst)
 	}
 }
+
+func (nn *NeuralNetwork) Backward(aIn, aOut mat.Matrix) {
+	nn.Forward(aOut)
+
+	//Initial error
+	err := &mat.Dense{}
+	err.Sub(aOut, nn.Result())
+
+	for i := nn.Count - 1; i > 0; i-- {
+		sigmoidsPrime := &mat.Dense{}
+		sigmoidsPrime.Apply(applySigmoidPrime, nn.Z[i])
+
+		delta := &mat.Dense{}
+		delta.MulElem(err, sigmoidsPrime)
+		err = delta
+
+		biases := mat.DenseCopyOf(delta)
+		weights := &mat.Dense{}
+		weights.Mul(nn.A[i-1], delta)
+
+		// Scale down
+		nn.Weights[i] = makeBackGradien(weights, nn.Weights[i], nn.alpha)
+		nn.Biases[i] = makeBackGradien(biases, nn.Biases[i], nn.alpha)
+	}
+}
+
+func makeBackGradien(in mat.Matrix, actual mat.Matrix, alpha float64) *mat.Dense {
+	scaled := &mat.Dense{}
+	result := &mat.Dense{}
+
+	scaled.Scale(alpha, in)
+	result.Sub(actual, scaled)
+	return result
+}