backpropgradient.go

/*
 * MIT License
 *
 * Copyright (c) 2019 Alexey Edelev <semlanik@gmail.com>
 *
 * This file is part of NeuralNetwork project https://git.semlanik.org/semlanik/NeuralNetwork
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this
 * software and associated documentation files (the "Software"), to deal in the Software
 * without restriction, including without limitation the rights to use, copy, modify,
 * merge, publish, distribute, sublicense, and/or sell copies of the Software, and
 * to permit persons to whom the Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies
 * or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
package gradients

import (
	neuralnetwork "git.semlanik.org/semlanik/NeuralNetwork/neuralnetwork"
	mat "gonum.org/v1/gonum/mat"
)

// Simple backpropagation gradient descent with a constant learning rate η.
type backPropGradient struct {
	alpha float64
}

func NewBackPropInitializer(nu float64) neuralnetwork.GradientDescentInitializer {
	return func(nn *neuralnetwork.NeuralNetwork, layer, gradientType int) interface{} {
		// Normalize the learning rate by the size of the network's first layer (nn.Sizes[0]).
		return newBackPropGradient(nu / float64(nn.Sizes[0]))
	}
}

func newBackPropGradient(a float64) (g *backPropGradient) {
	g = &backPropGradient{alpha: a}
	return
}

func (g *backPropGradient) ApplyDelta(m mat.Matrix, gradient mat.Matrix) (result *mat.Dense) {
	// Gradient descent step on the actual matrix:
	// m[l]′ = m[l] − η * ∂C/∂m
	// where ∂C/∂m is the `gradient` matrix and η is g.alpha.
	scaled := &mat.Dense{}
	result = &mat.Dense{}
	// η * ∂C/∂m
	scaled.Scale(g.alpha, gradient)
	// m[l] − η * ∂C/∂m
	result.Sub(m, scaled)
	return result
}
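
For illustration, here is a minimal, self-contained sketch of the same update rule using only gonum/mat. The 2×2 weight matrix, the gradient values, and the η = 0.1 learning rate are made up for the example; the arithmetic mirrors ApplyDelta above.

package main

import (
	"fmt"

	mat "gonum.org/v1/gonum/mat"
)

func main() {
	// Hypothetical 2x2 weight matrix m and its cost gradient ∂C/∂m.
	m := mat.NewDense(2, 2, []float64{0.5, -0.2, 0.1, 0.8})
	gradient := mat.NewDense(2, 2, []float64{0.05, 0.01, -0.02, 0.04})
	eta := 0.1 // assumed learning rate η

	// Same steps as ApplyDelta: scaled = η * ∂C/∂m, result = m − scaled.
	scaled := &mat.Dense{}
	scaled.Scale(eta, gradient)
	result := &mat.Dense{}
	result.Sub(m, scaled)

	fmt.Printf("m' = m − η*∂C/∂m:\n%v\n", mat.Formatted(result))
}

Note that Scale and Sub write into their receiver and leave the input matrices untouched, which matches how ApplyDelta allocates a fresh *mat.Dense for its result instead of mutating m.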