1234567891011121314151617181920212223242526272829303132333435363738394041424344454647484950515253545556575859606162636465666768697071727374757677787980818283848586878889909192939495969798 |
- package earlystop
- import (
- "log"
- "math"
- "os"
- neuralnetwork "git.semlanik.org/semlanik/NeuralNetwork/neuralnetwork"
- training "git.semlanik.org/semlanik/NeuralNetwork/training"
- )
// tmpFileName is the scratch file used to checkpoint the best-performing
// network state so it can be restored when early stopping triggers.
const tmpFileName = "./.simpleDescentEarlyStop.nnd.tmp"
// simpleDescentEarlyStop implements an early-stopping criterion based on
// generalization loss: the validation error is tracked across Test() calls and
// training is stopped once the error keeps growing relative to the best value
// seen so far.
type simpleDescentEarlyStop struct {
	lastFailRate     float64                      // validation error of the most recent Test() call
	bestFailRate     float64                      // lowest validation error observed so far
	failRateDeltaSum float64                      // accumulated error delta; only reset in this file — TODO confirm it is still needed
	network          *neuralnetwork.NeuralNetwork // network being trained and validated
	trainer          training.Trainer             // supplies the validation data set
	glGrowCount      int                          // consecutive Test() calls with positive generalization loss
}
- func NewSimpleDescentEarlyStop(network *neuralnetwork.NeuralNetwork, trainer training.Trainer) (es *simpleDescentEarlyStop) {
- es = nil
- if network == nil || trainer == nil {
- return
- }
- es = &simpleDescentEarlyStop{
- lastFailRate: math.MaxFloat64,
- bestFailRate: math.MaxFloat64,
- failRateDeltaSum: 0.0,
- network: network,
- trainer: trainer,
- glGrowCount: 0,
- }
- return
- }
- func (es *simpleDescentEarlyStop) Test() bool {
- squareError, fails, total := es.network.Validate(es.trainer)
- es.lastFailRate = squareError / float64(total)
- log.Printf("Fail count: %v/%v, lastFailRate: %v\n", fails, total, es.lastFailRate)
- generalizationLoss := (es.lastFailRate/es.bestFailRate - 1.0)
- if es.bestFailRate > es.lastFailRate {
- es.bestFailRate = es.lastFailRate
- es.network.SaveStateToFile(tmpFileName)
- }
- if generalizationLoss > 0.0 {
- es.glGrowCount++
- } else {
- es.glGrowCount = 0
- }
- if es.glGrowCount > 5 {
- es.network.LoadStateFromFile(tmpFileName)
- os.Remove(tmpFileName)
- return true
- }
- return false
- }
- func (es *simpleDescentEarlyStop) Reset() {
- es.lastFailRate = math.MaxFloat64
- es.bestFailRate = math.MaxFloat64
- es.glGrowCount = 0
- es.failRateDeltaSum = 0.0
- }
|