simpledescentearlystop.go

/*
 * MIT License
 *
 * Copyright (c) 2020 Alexey Edelev <semlanik@gmail.com>
 *
 * This file is part of NeuralNetwork project https://git.semlanik.org/semlanik/NeuralNetwork
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this
 * software and associated documentation files (the "Software"), to deal in the Software
 * without restriction, including without limitation the rights to use, copy, modify,
 * merge, publish, distribute, sublicense, and/or sell copies of the Software, and
 * to permit persons to whom the Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies
 * or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

package earlystop

import (
    "log"

    neuralnetwork "git.semlanik.org/semlanik/NeuralNetwork/neuralnetwork"
    training "git.semlanik.org/semlanik/NeuralNetwork/training"
)
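
// simpleDescentEarlyStop is an early-stop criterion that watches the
// validation fail rate after each test and accumulates consecutive
// fail-rate growth, remembering the best fail rate seen so far.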
type simpleDescentEarlyStop struct {
    lastFailRate     float64
    bestFailRate     float64
    failRateDeltaSum float64
    network          *neuralnetwork.NeuralNetwork
    trainer          training.Trainer
}
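
// NewSimpleDescentEarlyStop returns a criterion bound to the given network
// and trainer, or nil if either argument is nil.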
func NewSimpleDescentEarlyStop(network *neuralnetwork.NeuralNetwork, trainer training.Trainer) (es *simpleDescentEarlyStop) {
    if network == nil || trainer == nil {
        return nil
    }

    es = &simpleDescentEarlyStop{
        lastFailRate:     1.0,
        bestFailRate:     1.0,
        failRateDeltaSum: 0.0,
        network:          network,
        trainer:          trainer,
    }
    return
}
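
// Test runs one validation pass over the trainer's data and updates the
// fail-rate statistics. It currently always returns false; the commented-out
// expression in the return shows the intended stop condition (accumulated
// fail-rate growth above 0.05).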
func (es *simpleDescentEarlyStop) Test() bool {
    squareError, fails, total := es.network.Validate(es.trainer)
    log.Printf("Fail count: %v/%v, error: %v\n", fails, total, squareError)

    failRate := float64(fails) / float64(total)
    failRateDelta := failRate - es.lastFailRate
    log.Printf("failRate %v lastFailRate %v failRateDelta %v\n", failRate, es.lastFailRate, failRateDelta)
    es.lastFailRate = failRate

    if failRateDelta > 0 { // A positive delta means the fail rate grew; accumulate the total growth.
        es.failRateDeltaSum += failRateDelta
    } else { // Reset the accumulator once the fail rate drops again, i.e. we stepped over a local maximum.
        es.failRateDeltaSum = 0.0
    }

    if es.bestFailRate > es.lastFailRate {
        es.bestFailRate = es.lastFailRate
        // TODO: save the neural network state at this point.
    }

    return false // es.failRateDeltaSum > 0.05
}
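
// Reset restores the initial fail-rate statistics so the criterion can be
// reused for a fresh training run.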
func (es *simpleDescentEarlyStop) Reset() {
    es.lastFailRate = 1.0
    es.bestFailRate = 1.0
    es.failRateDeltaSum = 0.0
}
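
// Usage sketch: a minimal training loop built around this criterion. Only
// NewSimpleDescentEarlyStop, Test and Reset are defined in this file; how the
// network, trainer and epoch step are driven is an assumption about the
// surrounding code, and maxEpochs is an illustrative placeholder.
//
//	es := NewSimpleDescentEarlyStop(network, trainer)
//	if es == nil {
//	    log.Fatal("both network and trainer must be non-nil")
//	}
//	for epoch := 0; epoch < maxEpochs; epoch++ {
//	    // ... run one training epoch on network here ...
//	    if es.Test() {
//	        break // stop once accumulated fail-rate growth crosses the threshold
//	    }
//	}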