gradients.go

/*
 * MIT License
 *
 * Copyright (c) 2019 Alexey Edelev <semlanik@gmail.com>
 *
 * This file is part of NeuralNetwork project https://git.semlanik.org/semlanik/NeuralNetwork
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this
 * software and associated documentation files (the "Software"), to deal in the Software
 * without restriction, including without limitation the rights to use, copy, modify,
 * merge, publish, distribute, sublicense, and/or sell copies of the Software, and
 * to permit persons to whom the Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies
 * or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
package neuralnetworkbase

import (
    "math"

    mat "gonum.org/v1/gonum/mat"
)
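
// RPropGradient implements resilient backpropagation (Rprop): every weight
// keeps its own adaptive step size in Deltas, grown or shrunk depending on
// whether the gradient kept or changed its sign since the previous update.
// Gradients stores the previous derivatives for that sign comparison.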
type RPropGradient struct {
    Gradients *mat.Dense
    Deltas    *mat.Dense
}
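
// NewRPropGradient allocates an r×c Rprop state with zeroed previous
// gradients and all step sizes set to the conventional initial value Δ₀ = 0.1.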
func NewRPropGradient(r, c int) (g *RPropGradient) {
    g = &RPropGradient{}

    deltas := make([]float64, r*c)
    for j := range deltas {
        deltas[j] = 0.1
    }

    g.Gradients = mat.NewDense(r, c, nil)
    g.Deltas = mat.NewDense(r, c, deltas)
    return
}
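
// ApplyDelta performs one Rprop update of matrix m given the current
// derivative ∂C/∂m. Where the derivative keeps its sign, the step size grows
// by nuPlus (capped at deltaMax); where the sign flips, the previous step is
// retracted and the step size shrinks by nuMinus (floored at deltaMin).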
func (g *RPropGradient) ApplyDelta(m mat.Matrix, derivative mat.Matrix) (result *mat.Dense) {
    //TODO: move these hardcoded parameters to a separate config for the gradient
    nuPlus := 1.2
    nuMinus := 0.5

    deltaMax := 50.0
    deltaMin := 0.000001

    result = &mat.Dense{}
    result.Apply(func(i, j int, v float64) (outV float64) {
        gradientSign := g.Gradients.At(i, j) * derivative.At(i, j)
        if gradientSign > 0 {
            // Same sign as the last step: accelerate, then step downhill.
            g.Deltas.Set(i, j, math.Min(nuPlus*g.Deltas.At(i, j), deltaMax))
            outV = v - sign(derivative.At(i, j))*g.Deltas.At(i, j)
            g.Gradients.Set(i, j, derivative.At(i, j))
        } else if gradientSign < 0 {
            // Sign flipped: the previous step overshot a minimum, so retract
            // it, slow down, and zero the stored gradient so the next update
            // skips the sign check.
            outV = v + sign(g.Gradients.At(i, j))*g.Deltas.At(i, j)
            g.Deltas.Set(i, j, math.Max(nuMinus*g.Deltas.At(i, j), deltaMin))
            g.Gradients.Set(i, j, 0.0)
        } else {
            // No previous gradient (first step or right after a retraction):
            // step downhill with the current step size.
            outV = v - sign(derivative.At(i, j))*g.Deltas.At(i, j)
            g.Gradients.Set(i, j, derivative.At(i, j))
        }
        return
    }, m)
    return result
}
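
// Note: sign is not defined in this file; it is assumed to live elsewhere in
// the neuralnetworkbase package. A minimal sketch of the assumed helper:
//
//	func sign(v float64) float64 {
//		if v > 0 {
//			return 1.0
//		}
//		if v < 0 {
//			return -1.0
//		}
//		return 0.0
//	}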

// BackPropGradient is simple backpropagation with a constant learning
// rate η, stored in alpha.
type BackPropGradient struct {
    alpha float64
}
func (g *BackPropGradient) ApplyDelta(m mat.Matrix, derivative mat.Matrix) (result *mat.Dense) {
    // Gradient-descent update of the actual matrix:
    // m[l]′ = m[l] − η * ∂C/∂m[l]
    // where ∂C/∂m[l] is the `derivative` matrix and η is g.alpha.
    scaled := &mat.Dense{}
    result = &mat.Dense{}
    // η * ∂C/∂m[l]
    scaled.Scale(g.alpha, derivative)
    // m[l] − η * ∂C/∂m[l]
    result.Sub(m, scaled)
    return result
}
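
// Usage sketch (hypothetical, not part of this file): both gradient types
// expose the same ApplyDelta(m, derivative) shape, so a training loop can
// swap descent strategies. The names weights and costGradient below are
// assumed stand-ins for a layer's weight matrix and its cost derivative:
//
//	r, c := weights.Dims()
//	descent := NewRPropGradient(r, c) // or &BackPropGradient{alpha: 0.1}
//	weights = descent.ApplyDelta(weights, costGradient)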