/*
 * MIT License
 *
 * Copyright (c) 2020 Alexey Edelev
 *
 * This file is part of NeuralNetwork project https://git.semlanik.org/semlanik/NeuralNetwork
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this
 * software and associated documentation files (the "Software"), to deal in the Software
 * without restriction, including without limitation the rights to use, copy, modify,
 * merge, publish, distribute, sublicense, and/or sell copies of the Software, and
 * to permit persons to whom the Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies
 * or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

package earlystop

import (
	"log"
	"math"
	"os"

	neuralnetwork "git.semlanik.org/semlanik/NeuralNetwork/neuralnetwork"
	training "git.semlanik.org/semlanik/NeuralNetwork/training"
)

const tmpFileName = "./.simpleDescentEarlyStop.nnd.tmp"

// simpleDescentEarlyStop implements an early-stopping criterion based on
// generalization loss: training stops once the validation error has grown,
// relative to the best error seen so far, for several checks in a row.
type simpleDescentEarlyStop struct {
	lastFailRate     float64
	bestFailRate     float64
	failRateDeltaSum float64
	network          *neuralnetwork.NeuralNetwork
	trainer          training.Trainer
	glGrowCount      int
}

// NewSimpleDescentEarlyStop creates a new early-stop criterion for the given
// network and trainer. It returns nil if either argument is nil.
func NewSimpleDescentEarlyStop(network *neuralnetwork.NeuralNetwork, trainer training.Trainer) *simpleDescentEarlyStop {
	if network == nil || trainer == nil {
		return nil
	}

	return &simpleDescentEarlyStop{
		lastFailRate:     math.MaxFloat64,
		bestFailRate:     math.MaxFloat64,
		failRateDeltaSum: 0.0,
		network:          network,
		trainer:          trainer,
		glGrowCount:      0,
	}
}

// Test validates the network and reports whether training should stop.
// The best-performing network state is cached in a temporary file and is
// restored when the stop condition triggers.
func (es *simpleDescentEarlyStop) Test() bool {
	squareError, fails, total := es.network.Validate(es.trainer)

	// Mean squared validation error is used as the "fail rate" here.
	es.lastFailRate = squareError / float64(total)

	log.Printf("Fail count: %v/%v, lastFailRate: %v\n", fails, total, es.lastFailRate)

	// Generalization loss: relative growth of the current validation error
	// over the best validation error observed so far.
	generalizationLoss := es.lastFailRate/es.bestFailRate - 1.0

	if es.bestFailRate > es.lastFailRate {
		es.bestFailRate = es.lastFailRate
		es.network.SaveStateToFile(tmpFileName)
	}

	if generalizationLoss > 0.0 {
		es.glGrowCount++
	} else {
		es.glGrowCount = 0
	}

	// Stop once the validation error has grown for more than 5 consecutive
	// checks; roll back to the best saved state before reporting the stop.
	if es.glGrowCount > 5 {
		es.network.LoadStateFromFile(tmpFileName)
		os.Remove(tmpFileName)
		return true
	}

	return false
}

// Reset restores the criterion to its initial state so it can be reused
// for another training run.
func (es *simpleDescentEarlyStop) Reset() {
	es.lastFailRate = math.MaxFloat64
	es.bestFailRate = math.MaxFloat64
	es.glGrowCount = 0
	es.failRateDeltaSum = 0.0
}
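
// Usage sketch (illustrative only): how this criterion would typically be wired
// into a training loop from a calling package. Only NewSimpleDescentEarlyStop,
// Test and Reset come from this file; the nn/trainer setup and the nn.Train call
// below are assumptions and may not match the actual NeuralNetwork API.
//
//	criterion := earlystop.NewSimpleDescentEarlyStop(nn, trainer) // nn and trainer set up elsewhere
//	criterion.Reset()
//	for epoch := 0; epoch < maxEpochs; epoch++ {
//		nn.Train(trainer, 1) // hypothetical training call, one epoch per iteration
//		if criterion.Test() {
//			// Validation error grew for too many consecutive checks;
//			// the best saved state has already been restored.
//			break
//		}
//	}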