/*
 * MIT License
 *
 * Copyright (c) 2019 Alexey Edelev <semlanik@gmail.com>
 *
 * This file is part of NeuralNetwork project https://git.semlanik.org/semlanik/NeuralNetwork
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this
 * software and associated documentation files (the "Software"), to deal in the Software
 * without restriction, including without limitation the rights to use, copy, modify,
 * merge, publish, distribute, sublicense, and/or sell copies of the Software, and
 * to permit persons to whom the Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies
 * or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

package neuralnetworkbase

import (
	teach "../teach"
	mat "gonum.org/v1/gonum/mat"
)

// batchWorker accumulates per-layer bias and weight gradients over a batch
// of training samples for a single NeuralNetwork. Create instances with
// newBatchWorker. Index 0 of the gradient slices is left unset because the
// per-layer loops in this file start at layer 1.
type batchWorker struct {
	network   *NeuralNetwork          // network whose backward pass produces the gradients
	BGradient []BatchGradientDescent  // per-layer bias gradient accumulators (index 0 unused)
	WGradient []BatchGradientDescent  // per-layer weight gradient accumulators (index 0 unused)
	batchSize int                     // NOTE(review): never read or written in this chunk — presumably used elsewhere; verify
}

// newBatchWorker creates a batchWorker bound to nn, allocating one bias and
// one weight batch-gradient accumulator per layer. Layer 1 is the first
// layer with parameters, so slot 0 of each slice is left nil.
//
// Panics with a descriptive message if nn.gradientDescentInitializer does
// not return a BatchGradientDescent implementation, instead of the opaque
// runtime panic a bare type assertion would raise.
func newBatchWorker(nn *NeuralNetwork) (bw *batchWorker) {
	bw = &batchWorker{
		network:   nn,
		BGradient: make([]BatchGradientDescent, nn.LayerCount),
		WGradient: make([]BatchGradientDescent, nn.LayerCount),
	}

	for l := 1; l < nn.LayerCount; l++ {
		bGradient, ok := nn.gradientDescentInitializer(nn, l, BiasGradient).(BatchGradientDescent)
		if !ok {
			panic("newBatchWorker: gradientDescentInitializer must return a BatchGradientDescent for bias gradients")
		}
		wGradient, ok := nn.gradientDescentInitializer(nn, l, WeightGradient).(BatchGradientDescent)
		if !ok {
			panic("newBatchWorker: gradientDescentInitializer must return a BatchGradientDescent for weight gradients")
		}
		bw.BGradient[l] = bGradient
		bw.WGradient[l] = wGradient
	}
	return
}

// Run consumes every sample the teacher provides, backpropagates each one
// through the attached network, and folds the resulting per-layer bias and
// weight gradients into the worker's accumulators. The teacher is rewound
// with Reset afterwards so it can be reused for the next batch.
func (bw *batchWorker) Run(teacher teach.Teacher) {
	nn := bw.network
	for teacher.NextData() {
		biasDeltas, weightDeltas := nn.backward(teacher.GetData())
		for layer := 1; layer < nn.LayerCount; layer++ {
			bw.BGradient[layer].AccumGradients(biasDeltas[layer])
			bw.WGradient[layer].AccumGradients(weightDeltas[layer])
		}
	}
	teacher.Reset()
}

// Result reports the accumulated bias and weight gradients for the given
// layer as dense matrices.
func (bw *batchWorker) Result(layer int) (dB, dW *mat.Dense) {
	dB = bw.BGradient[layer].Gradients()
	dW = bw.WGradient[layer].Gradients()
	return
}