123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687 |
- /*
- * MIT License
- *
- * Copyright (c) 2020 Alexey Edelev <semlanik@gmail.com>
- *
- * This file is part of NeuralNetwork project https://git.semlanik.org/semlanik/NeuralNetwork
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy of this
- * software and associated documentation files (the "Software"), to deal in the Software
- * without restriction, including without limitation the rights to use, copy, modify,
- * merge, publish, distribute, sublicense, and/or sell copies of the Software, and
- * to permit persons to whom the Software is furnished to do so, subject to the following
- * conditions:
- *
- * The above copyright notice and this permission notice shall be included in all copies
- * or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
- * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
- * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
- * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
- * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
- * DEALINGS IN THE SOFTWARE.
- */
- package earlystop
- import (
- "log"
- neuralnetwork "git.semlanik.org/semlanik/NeuralNetwork/neuralnetwork"
- training "git.semlanik.org/semlanik/NeuralNetwork/training"
- )
// simpleDescentEarlyStop tracks the validation fail rate of a neural network
// across training epochs to decide when training should stop early.
// Use NewSimpleDescentEarlyStop to construct a valid instance.
type simpleDescentEarlyStop struct {
	lastFailRate     float64                      // fail rate observed on the previous Test call (starts at 1.0)
	bestFailRate     float64                      // lowest fail rate seen so far (starts at 1.0)
	failRateDeltaSum float64                      // accumulated consecutive growth of the fail rate; reset on any improvement
	network          *neuralnetwork.NeuralNetwork // network under validation
	trainer          training.Trainer             // data source used by network.Validate
}
- func NewSimpleDescentEarlyStop(network *neuralnetwork.NeuralNetwork, trainer training.Trainer) (es *simpleDescentEarlyStop) {
- es = nil
- if network == nil || trainer == nil {
- return
- }
- es = &simpleDescentEarlyStop{
- lastFailRate: 1.0,
- bestFailRate: 1.0,
- failRateDeltaSum: 0.0,
- network: network,
- trainer: trainer,
- }
- return
- }
- func (es *simpleDescentEarlyStop) Test() bool {
- squareError, fails, total := es.network.Validate(es.trainer)
- log.Printf("Fail count: %v/%v, error: %v\n", fails, total, squareError)
- failRate := float64(fails) / float64(total)
- failRateDelta := failRate - es.lastFailRate
- log.Printf("failRate %v lastFailRate %v failRateDelta %v \n", failRate, es.lastFailRate, failRateDelta)
- es.lastFailRate = failRate
- if failRateDelta > 0 { //Positive failRateDelta cause fail rate grow, accumulate total grow
- es.failRateDeltaSum += failRateDelta
- } else { //Reset failRateDeltaSum in case if we step over one of local maximum
- es.failRateDeltaSum = 0.0
- }
- if es.bestFailRate > es.lastFailRate {
- es.bestFailRate = es.lastFailRate
- //TODO: save neuralnetwork state at this point
- }
- return false //es.failRateDeltaSum > 0.05
- }
- func (es *simpleDescentEarlyStop) Reset() {
- es.lastFailRate = 1.0
- es.bestFailRate = 1.0
- es.failRateDeltaSum = 0.0
- }
|