Bladeren bron

Add remote control and state watcher

TODO: Not implemented yet; only the interfaces have been added.
Alexey Edelev 5 jaren geleden
bovenliggende
commit
892464831e

+ 4 - 0
.gitignore

@@ -0,0 +1,4 @@
+src
+bin
+pkg
+

+ 8 - 0
build.sh

@@ -1,6 +1,14 @@
 export GOPATH=$PWD
 export PATH=$PATH:$PWD/bin
 export GOBIN=$PWD/bin
+export RPC_PATH=$PWD/neuralnetwork/remotecontrol
+
+go get github.com/golang/protobuf/protoc-gen-go
+go install ./src/github.com/golang/protobuf/protoc-gen-go
+
+mkdir -p $RPC_PATH
+rm -f $RPC_PATH/*.pb.go
+protoc -I$RPC_PATH --go_out=plugins=grpc:$RPC_PATH $RPC_PATH/remotecontrol.proto
 
 cd neuralnetwork
 

+ 5 - 2
neuralnetwork/main.go

@@ -6,11 +6,12 @@ import (
 	"os"
 
 	neuralnetwork "./neuralnetworkbase"
+	remotecontrol "./remotecontrol"
 	teach "./teach"
 )
 
 func main() {
-	sizes := []int{13, 16, 16, 3}
+	sizes := []int{13, 8, 8, 8, 8, 3}
 	nn, _ := neuralnetwork.NewNeuralNetwork(sizes, neuralnetwork.NewRPropInitializer(neuralnetwork.RPropConfig{
 		NuPlus:   1.2,
 		NuMinus:  0.5,
@@ -18,6 +19,8 @@ func main() {
 		DeltaMin: 0.000001,
 	}))
 
+	rc := &remotecontrol.RemoteControl{}
+	nn.SetStateWatcher(rc)
 	// inFile, err := os.Open("./networkstate")
 	// if err != nil {
 	// 	log.Fatal(err)
@@ -38,7 +41,7 @@ func main() {
 
 	// teacher := teach.NewMNISTReader("./minst.data", "./mnist.labels")
 	teacher := teach.NewTextDataReader("wine.data", 7)
-	nn.Teach(teacher, 500)
+	nn.Teach(teacher, 1000)
 
 	// for i := 0; i < nn.Count; i++ {
 	// 	if i > 0 {

+ 4 - 4
neuralnetwork/neuralnetworkbase/batchworker.go

@@ -40,11 +40,11 @@ type batchWorker struct {
 func newBatchWorker(nn *NeuralNetwork) (bw *batchWorker) {
 	bw = &batchWorker{
 		network:   nn,
-		BGradient: make([]BatchGradientDescent, nn.LayerCount),
-		WGradient: make([]BatchGradientDescent, nn.LayerCount),
+		BGradient: make([]BatchGradientDescent, nn.layerCount),
+		WGradient: make([]BatchGradientDescent, nn.layerCount),
 	}
 
-	for l := 1; l < nn.LayerCount; l++ {
+	for l := 1; l < nn.layerCount; l++ {
 		bw.BGradient[l] = nn.gradientDescentInitializer(nn, l, BiasGradient).(BatchGradientDescent)
 		bw.WGradient[l] = nn.gradientDescentInitializer(nn, l, WeightGradient).(BatchGradientDescent)
 	}
@@ -54,7 +54,7 @@ func newBatchWorker(nn *NeuralNetwork) (bw *batchWorker) {
 func (bw *batchWorker) Run(teacher teach.Teacher, startIndex, endIndex int) {
 	for i := startIndex; i < endIndex; i++ {
 		dB, dW := bw.network.backward(teacher.GetDataByIndex(i))
-		for l := 1; l < bw.network.LayerCount; l++ {
+		for l := 1; l < bw.network.layerCount; l++ {
 			bw.BGradient[l].AccumGradients(dB[l])
 			bw.WGradient[l].AccumGradients(dW[l])
 		}

+ 7 - 0
neuralnetwork/neuralnetworkbase/interface.go

@@ -45,3 +45,10 @@ type BatchGradientDescent interface {
 	AccumGradients(gradient mat.Matrix)
 	Gradients() *mat.Dense
 }
+
+type StateWatcher interface {
+	Init(nn *NeuralNetwork)
+	UpdateActivations(l int, a *mat.Dense)
+	UpdateBiases(l int, biases *mat.Dense)
+	UpdateWeights(l int, weights *mat.Dense)
+}

+ 53 - 32
neuralnetwork/neuralnetworkbase/neuralnetwork.go

@@ -98,13 +98,14 @@ import (
 //       L = len(Sizes) - Number of neural network layers
 
 type NeuralNetwork struct {
-	LayerCount                 int
+	layerCount                 int
 	Sizes                      []int
 	Biases                     []*mat.Dense
 	Weights                    []*mat.Dense
 	BGradient                  []interface{}
 	WGradient                  []interface{}
 	gradientDescentInitializer GradientDescentInitializer
+	watcher                    StateWatcher
 }
 
 func NewNeuralNetwork(sizes []int, gradientDescentInitializer GradientDescentInitializer) (nn *NeuralNetwork, err error) {
@@ -123,15 +124,15 @@ func NewNeuralNetwork(sizes []int, gradientDescentInitializer GradientDescentIni
 
 	nn = &NeuralNetwork{}
 	nn.Sizes = sizes
-	nn.LayerCount = len(sizes)
-	nn.Biases = make([]*mat.Dense, nn.LayerCount)
-	nn.Weights = make([]*mat.Dense, nn.LayerCount)
-	nn.BGradient = make([]interface{}, nn.LayerCount)
-	nn.WGradient = make([]interface{}, nn.LayerCount)
+	nn.layerCount = len(sizes)
+	nn.Biases = make([]*mat.Dense, nn.layerCount)
+	nn.Weights = make([]*mat.Dense, nn.layerCount)
+	nn.BGradient = make([]interface{}, nn.layerCount)
+	nn.WGradient = make([]interface{}, nn.layerCount)
 
 	nn.gradientDescentInitializer = gradientDescentInitializer
 
-	for l := 1; l < nn.LayerCount; l++ {
+	for l := 1; l < nn.layerCount; l++ {
 		nn.Biases[l] = generateRandomDense(nn.Sizes[l], 1)
 		nn.Weights[l] = generateRandomDense(nn.Sizes[l], nn.Sizes[l-1])
 		nn.BGradient[l] = nn.gradientDescentInitializer(nn, l, BiasGradient)
@@ -140,6 +141,11 @@ func NewNeuralNetwork(sizes []int, gradientDescentInitializer GradientDescentIni
 	return
 }
 
+func (nn *NeuralNetwork) SetStateWatcher(watcher StateWatcher) {
+	nn.watcher = watcher
+	watcher.Init(nn)
+}
+
 func (nn *NeuralNetwork) Predict(aIn mat.Matrix) (maxIndex int, max float64) {
 	r, _ := aIn.Dims()
 	if r != nn.Sizes[0] {
@@ -148,7 +154,7 @@ func (nn *NeuralNetwork) Predict(aIn mat.Matrix) (maxIndex int, max float64) {
 	}
 
 	A, _ := nn.forward(aIn)
-	result := A[nn.LayerCount-1]
+	result := A[nn.layerCount-1]
 	r, _ = result.Dims()
 	max = 0.0
 	maxIndex = 0
@@ -162,9 +168,9 @@ func (nn *NeuralNetwork) Predict(aIn mat.Matrix) (maxIndex int, max float64) {
 }
 
 func (nn *NeuralNetwork) Teach(teacher teach.Teacher, epocs int) {
-	if _, ok := nn.WGradient[nn.LayerCount-1].(OnlineGradientDescent); ok {
+	if _, ok := nn.WGradient[nn.layerCount-1].(OnlineGradientDescent); ok {
 		nn.TeachOnline(teacher, epocs)
-	} else if _, ok := nn.WGradient[nn.LayerCount-1].(BatchGradientDescent); ok {
+	} else if _, ok := nn.WGradient[nn.layerCount-1].(BatchGradientDescent); ok {
 		nn.TeachBatch(teacher, epocs)
 	} else {
 		panic("Invalid gradient descent type")
@@ -175,7 +181,7 @@ func (nn *NeuralNetwork) TeachOnline(teacher teach.Teacher, epocs int) {
 	for t := 0; t < epocs; t++ {
 		for teacher.NextData() {
 			dB, dW := nn.backward(teacher.GetData())
-			for l := 1; l < nn.LayerCount; l++ {
+			for l := 1; l < nn.layerCount; l++ {
 				bGradient, ok := nn.BGradient[l].(OnlineGradientDescent)
 				if !ok {
 					panic("bGradient is not a OnlineGradientDescent")
@@ -186,6 +192,10 @@ func (nn *NeuralNetwork) TeachOnline(teacher teach.Teacher, epocs int) {
 				}
 				nn.Biases[l] = bGradient.ApplyDelta(nn.Biases[l], dB[l])
 				nn.Weights[l] = wGradient.ApplyDelta(nn.Weights[l], dW[l])
+				if nn.watcher != nil {
+					nn.watcher.UpdateBiases(l, nn.Biases[l])
+					nn.watcher.UpdateWeights(l, nn.Weights[l])
+				}
 			}
 		}
 		teacher.Reset()
@@ -196,7 +206,7 @@ func (nn *NeuralNetwork) TeachBatch(teacher teach.Teacher, epocs int) {
 	for t := 0; t < epocs; t++ {
 		batchWorkers := nn.runBatchWorkers(runtime.NumCPU(), teacher)
 
-		for l := 1; l < nn.LayerCount; l++ {
+		for l := 1; l < nn.layerCount; l++ {
 			bGradient, ok := nn.BGradient[l].(BatchGradientDescent)
 			if !ok {
 				panic("bGradient is not a BatchGradientDescent")
@@ -212,6 +222,10 @@ func (nn *NeuralNetwork) TeachBatch(teacher teach.Teacher, epocs int) {
 			}
 			nn.Biases[l] = bGradient.ApplyDelta(nn.Biases[l])
 			nn.Weights[l] = wGradient.ApplyDelta(nn.Weights[l])
+			if nn.watcher != nil {
+				nn.watcher.UpdateBiases(l, nn.Biases[l])
+				nn.watcher.UpdateWeights(l, nn.Weights[l])
+			}
 		}
 	}
 }
@@ -236,15 +250,15 @@ func (nn *NeuralNetwork) runBatchWorkers(threadCount int, teacher teach.Teacher)
 func (nn *NeuralNetwork) SaveState(writer io.Writer) {
 	//save input array count
 	bufferSize := make([]byte, 4)
-	binary.LittleEndian.PutUint32(bufferSize[0:], uint32(nn.LayerCount))
+	binary.LittleEndian.PutUint32(bufferSize[0:], uint32(nn.layerCount))
 	_, err := writer.Write(bufferSize)
 
 	check(err)
-	fmt.Printf("wrote value %d\n", uint32(nn.LayerCount))
+	fmt.Printf("wrote value %d\n", uint32(nn.layerCount))
 
 	// save an input array
-	buffer := make([]byte, nn.LayerCount*4)
-	for i := 0; i < nn.LayerCount; i++ {
+	buffer := make([]byte, nn.layerCount*4)
+	for i := 0; i < nn.layerCount; i++ {
 		binary.LittleEndian.PutUint32(buffer[i*4:], uint32(nn.Sizes[i]))
 	}
 
@@ -254,26 +268,26 @@ func (nn *NeuralNetwork) SaveState(writer io.Writer) {
 
 	//save biases
 	////////////////////////
-	for i := 1; i < nn.LayerCount; i++ {
+	for i := 1; i < nn.layerCount; i++ {
 		saveDense(writer, nn.Biases[i])
 	}
 
 	//save weights
 	////////////////////////
-	for i := 1; i < nn.LayerCount; i++ {
+	for i := 1; i < nn.layerCount; i++ {
 		saveDense(writer, nn.Weights[i])
 	}
 }
 
 func (nn *NeuralNetwork) LoadState(reader io.Reader) {
 	// Reade count
-	nn.LayerCount = readInt(reader)
+	nn.layerCount = readInt(reader)
 
 	// Read an input array
-	sizeBuffer := readByteArray(reader, nn.LayerCount*4)
-	nn.Sizes = make([]int, nn.LayerCount)
+	sizeBuffer := readByteArray(reader, nn.layerCount*4)
+	nn.Sizes = make([]int, nn.layerCount)
 
-	for i := 0; i < nn.LayerCount; i++ {
+	for i := 0; i < nn.layerCount; i++ {
 		nn.Sizes[i] = int(binary.LittleEndian.Uint32(sizeBuffer[i*4:]))
 		// fmt.Printf("LoadState: nn.Sizes[%d] %d \n", i, nn.Sizes[i])
 	}
@@ -283,14 +297,14 @@ func (nn *NeuralNetwork) LoadState(reader io.Reader) {
 
 	// read Biases
 	nn.Biases[0] = &mat.Dense{}
-	for i := 1; i < nn.LayerCount; i++ {
+	for i := 1; i < nn.layerCount; i++ {
 		nn.Biases = append(nn.Biases, &mat.Dense{})
 		nn.Biases[i] = readDense(reader, nn.Biases[i])
 	}
 
 	// read Weights
 	nn.Weights[0] = &mat.Dense{}
-	for i := 1; i < nn.LayerCount; i++ {
+	for i := 1; i < nn.layerCount; i++ {
 		nn.Weights = append(nn.Weights, &mat.Dense{})
 		nn.Weights[i] = readDense(reader, nn.Weights[i])
 	}
@@ -299,12 +313,16 @@ func (nn *NeuralNetwork) LoadState(reader io.Reader) {
 }
 
 func (nn NeuralNetwork) forward(aIn mat.Matrix) (A, Z []*mat.Dense) {
-	A = make([]*mat.Dense, nn.LayerCount)
-	Z = make([]*mat.Dense, nn.LayerCount)
+	A = make([]*mat.Dense, nn.layerCount)
+	Z = make([]*mat.Dense, nn.layerCount)
 
 	A[0] = mat.DenseCopyOf(aIn)
 
-	for l := 1; l < nn.LayerCount; l++ {
+	if nn.watcher != nil {
+		nn.watcher.UpdateActivations(0, A[0])
+	}
+
+	for l := 1; l < nn.layerCount; l++ {
 		A[l] = mat.NewDense(nn.Sizes[l], 1, nil)
 		aSrc := A[l-1]
 		aDst := A[l]
@@ -323,6 +341,9 @@ func (nn NeuralNetwork) forward(aIn mat.Matrix) (A, Z []*mat.Dense) {
 
 		// σ(W[l]*A[l−1]+B[l])
 		aDst.Apply(applySigmoid, aDst)
+		if nn.watcher != nil {
+			nn.watcher.UpdateActivations(l, aDst)
+		}
 	}
 	return
 }
@@ -332,9 +353,9 @@ func (nn NeuralNetwork) forward(aIn mat.Matrix) (A, Z []*mat.Dense) {
 func (nn NeuralNetwork) backward(aIn, aOut mat.Matrix) (dB, dW []*mat.Dense) {
 	A, Z := nn.forward(aIn)
 
-	lastLayerNum := nn.LayerCount - 1
-	dB = make([]*mat.Dense, nn.LayerCount)
-	dW = make([]*mat.Dense, nn.LayerCount)
+	lastLayerNum := nn.layerCount - 1
+	dB = make([]*mat.Dense, nn.layerCount)
+	dW = make([]*mat.Dense, nn.layerCount)
 
 	// To calculate new values of weights and biases
 	// following formulas are used:
@@ -348,7 +369,7 @@ func (nn NeuralNetwork) backward(aIn, aOut mat.Matrix) (dB, dW []*mat.Dense) {
 	// error = A[L]-y
 	// Where y is expected activations set
 	err := &mat.Dense{}
-	err.Sub(A[nn.LayerCount-1], aOut)
+	err.Sub(A[nn.layerCount-1], aOut)
 
 	// Calculate sigmoids prime σ'(Z[L]) for last layer L
 	sigmoidsPrime := &mat.Dense{}
@@ -377,7 +398,7 @@ func (nn NeuralNetwork) backward(aIn, aOut mat.Matrix) (dB, dW []*mat.Dense) {
 	// δ[l] = ((Wt[l+1])*δ[l+1])⊙σ'(Z[l])
 	// Where Wt[l+1] is transposed matrix of actual Weights from
 	// forward step
-	for l := nn.LayerCount - 2; l > 0; l-- {
+	for l := nn.layerCount - 2; l > 0; l-- {
 		// Calculate sigmoids prime σ'(Z[l]) for last layer l
 		sigmoidsPrime := &mat.Dense{}
 		sigmoidsPrime.Apply(applySigmoidPrime, Z[l])

+ 68 - 0
neuralnetwork/remotecontrol/remotecontrol.go

@@ -0,0 +1,68 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2019 Alexey Edelev <semlanik@gmail.com>
+ *
+ * This file is part of NeuralNetwork project https://git.semlanik.org/semlanik/NeuralNetwork
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of this
+ * software and associated documentation files (the "Software"), to deal in the Software
+ * without restriction, including without limitation the rights to use, copy, modify,
+ * merge, publish, distribute, sublicense, and/or sell copies of the Software, and
+ * to permit persons to whom the Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all copies
+ * or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+ * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+ * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+ * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+ * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ */
+
+package remotecontrol
+
+import (
+	"log"
+
+	neuralnetworkbase "../neuralnetworkbase"
+	"gonum.org/v1/gonum/mat"
+)
+
+type RemoteControl struct {
+}
+
+func (rw *RemoteControl) Init(nn *neuralnetworkbase.NeuralNetwork) {
+}
+
+func (rw *RemoteControl) UpdateActivations(l int, a *mat.Dense) {
+	// matrix := NewLayerMatrix(l, a, LayerMatrix_Activations)
+}
+
+func (rw *RemoteControl) UpdateBiases(l int, biases *mat.Dense) {
+	// matrix := NewLayerMatrix(l, biases, LayerMatrix_Biases)
+}
+
+func (rw *RemoteControl) UpdateWeights(l int, weights *mat.Dense) {
+	// matrix := NewLayerMatrix(l, weights, LayerMatrix_Weights)
+}
+
+func NewLayerMatrix(l int, dense *mat.Dense, contentType LayerMatrix_ContentType) (matrix *LayerMatrix) {
+	buffer, err := dense.MarshalBinary()
+	if err != nil {
+		log.Fatalln("Invalid dense is provided for remote control")
+	}
+
+	matrix = &LayerMatrix{
+		Matrix: &Matrix{
+			Matrix: buffer,
+		},
+		Layer:       int32(l),
+		ContentType: contentType,
+	}
+
+	return
+}

+ 557 - 0
neuralnetwork/remotecontrol/remotecontrol.pb.go

@@ -0,0 +1,557 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: remotecontrol.proto
+
+package remotecontrol
+
+import (
+	context "context"
+	fmt "fmt"
+	proto "github.com/golang/protobuf/proto"
+	grpc "google.golang.org/grpc"
+	codes "google.golang.org/grpc/codes"
+	status "google.golang.org/grpc/status"
+	math "math"
+)
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
+
+type LayerMatrix_ContentType int32
+
+const (
+	LayerMatrix_Activations LayerMatrix_ContentType = 0
+	LayerMatrix_Weights     LayerMatrix_ContentType = 1
+	LayerMatrix_Biases      LayerMatrix_ContentType = 2
+)
+
+var LayerMatrix_ContentType_name = map[int32]string{
+	0: "Activations",
+	1: "Weights",
+	2: "Biases",
+}
+
+var LayerMatrix_ContentType_value = map[string]int32{
+	"Activations": 0,
+	"Weights":     1,
+	"Biases":      2,
+}
+
+func (x LayerMatrix_ContentType) String() string {
+	return proto.EnumName(LayerMatrix_ContentType_name, int32(x))
+}
+
+func (LayerMatrix_ContentType) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_9e7470c0107e56c6, []int{1, 0}
+}
+
+type Matrix struct {
+	Matrix               []byte   `protobuf:"bytes,1,opt,name=matrix,proto3" json:"matrix,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *Matrix) Reset()         { *m = Matrix{} }
+func (m *Matrix) String() string { return proto.CompactTextString(m) }
+func (*Matrix) ProtoMessage()    {}
+func (*Matrix) Descriptor() ([]byte, []int) {
+	return fileDescriptor_9e7470c0107e56c6, []int{0}
+}
+
+func (m *Matrix) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Matrix.Unmarshal(m, b)
+}
+func (m *Matrix) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Matrix.Marshal(b, m, deterministic)
+}
+func (m *Matrix) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Matrix.Merge(m, src)
+}
+func (m *Matrix) XXX_Size() int {
+	return xxx_messageInfo_Matrix.Size(m)
+}
+func (m *Matrix) XXX_DiscardUnknown() {
+	xxx_messageInfo_Matrix.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Matrix proto.InternalMessageInfo
+
+func (m *Matrix) GetMatrix() []byte {
+	if m != nil {
+		return m.Matrix
+	}
+	return nil
+}
+
+type LayerMatrix struct {
+	ContentType          LayerMatrix_ContentType `protobuf:"varint,1,opt,name=contentType,proto3,enum=remotecontrol.LayerMatrix_ContentType" json:"contentType,omitempty"`
+	Layer                int32                   `protobuf:"zigzag32,2,opt,name=layer,proto3" json:"layer,omitempty"`
+	Matrix               *Matrix                 `protobuf:"bytes,3,opt,name=matrix,proto3" json:"matrix,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                `json:"-"`
+	XXX_unrecognized     []byte                  `json:"-"`
+	XXX_sizecache        int32                   `json:"-"`
+}
+
+func (m *LayerMatrix) Reset()         { *m = LayerMatrix{} }
+func (m *LayerMatrix) String() string { return proto.CompactTextString(m) }
+func (*LayerMatrix) ProtoMessage()    {}
+func (*LayerMatrix) Descriptor() ([]byte, []int) {
+	return fileDescriptor_9e7470c0107e56c6, []int{1}
+}
+
+func (m *LayerMatrix) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_LayerMatrix.Unmarshal(m, b)
+}
+func (m *LayerMatrix) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_LayerMatrix.Marshal(b, m, deterministic)
+}
+func (m *LayerMatrix) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_LayerMatrix.Merge(m, src)
+}
+func (m *LayerMatrix) XXX_Size() int {
+	return xxx_messageInfo_LayerMatrix.Size(m)
+}
+func (m *LayerMatrix) XXX_DiscardUnknown() {
+	xxx_messageInfo_LayerMatrix.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_LayerMatrix proto.InternalMessageInfo
+
+func (m *LayerMatrix) GetContentType() LayerMatrix_ContentType {
+	if m != nil {
+		return m.ContentType
+	}
+	return LayerMatrix_Activations
+}
+
+func (m *LayerMatrix) GetLayer() int32 {
+	if m != nil {
+		return m.Layer
+	}
+	return 0
+}
+
+func (m *LayerMatrix) GetMatrix() *Matrix {
+	if m != nil {
+		return m.Matrix
+	}
+	return nil
+}
+
+type Configuration struct {
+	Sizes                []int32  `protobuf:"zigzag32,1,rep,packed,name=sizes,proto3" json:"sizes,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *Configuration) Reset()         { *m = Configuration{} }
+func (m *Configuration) String() string { return proto.CompactTextString(m) }
+func (*Configuration) ProtoMessage()    {}
+func (*Configuration) Descriptor() ([]byte, []int) {
+	return fileDescriptor_9e7470c0107e56c6, []int{2}
+}
+
+func (m *Configuration) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Configuration.Unmarshal(m, b)
+}
+func (m *Configuration) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Configuration.Marshal(b, m, deterministic)
+}
+func (m *Configuration) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Configuration.Merge(m, src)
+}
+func (m *Configuration) XXX_Size() int {
+	return xxx_messageInfo_Configuration.Size(m)
+}
+func (m *Configuration) XXX_DiscardUnknown() {
+	xxx_messageInfo_Configuration.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Configuration proto.InternalMessageInfo
+
+func (m *Configuration) GetSizes() []int32 {
+	if m != nil {
+		return m.Sizes
+	}
+	return nil
+}
+
+type None struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *None) Reset()         { *m = None{} }
+func (m *None) String() string { return proto.CompactTextString(m) }
+func (*None) ProtoMessage()    {}
+func (*None) Descriptor() ([]byte, []int) {
+	return fileDescriptor_9e7470c0107e56c6, []int{3}
+}
+
+func (m *None) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_None.Unmarshal(m, b)
+}
+func (m *None) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_None.Marshal(b, m, deterministic)
+}
+func (m *None) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_None.Merge(m, src)
+}
+func (m *None) XXX_Size() int {
+	return xxx_messageInfo_None.Size(m)
+}
+func (m *None) XXX_DiscardUnknown() {
+	xxx_messageInfo_None.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_None proto.InternalMessageInfo
+
+func init() {
+	proto.RegisterEnum("remotecontrol.LayerMatrix_ContentType", LayerMatrix_ContentType_name, LayerMatrix_ContentType_value)
+	proto.RegisterType((*Matrix)(nil), "remotecontrol.Matrix")
+	proto.RegisterType((*LayerMatrix)(nil), "remotecontrol.LayerMatrix")
+	proto.RegisterType((*Configuration)(nil), "remotecontrol.Configuration")
+	proto.RegisterType((*None)(nil), "remotecontrol.None")
+}
+
+func init() { proto.RegisterFile("remotecontrol.proto", fileDescriptor_9e7470c0107e56c6) }
+
+var fileDescriptor_9e7470c0107e56c6 = []byte{
+	// 313 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x92, 0xc1, 0x4b, 0xc3, 0x30,
+	0x14, 0xc6, 0x9b, 0x4d, 0x3b, 0x78, 0x71, 0x5a, 0x33, 0x95, 0x51, 0x3c, 0x94, 0x80, 0xd2, 0x8b,
+	0x45, 0xea, 0x41, 0x3c, 0x28, 0xb8, 0x1e, 0xf4, 0xa0, 0x22, 0x45, 0xf0, 0x5c, 0x6b, 0xac, 0x81,
+	0xad, 0x19, 0x49, 0x14, 0xe7, 0xff, 0xa9, 0x7f, 0x8f, 0x34, 0x9d, 0xb5, 0x2d, 0xf5, 0xb2, 0x5b,
+	0xde, 0xe3, 0xfb, 0xbe, 0xf7, 0xcb, 0x4b, 0x60, 0x24, 0xd9, 0x4c, 0x68, 0x96, 0x8a, 0x5c, 0x4b,
+	0x31, 0x0d, 0xe6, 0x52, 0x68, 0x41, 0x86, 0x8d, 0x26, 0xf5, 0xc0, 0xbe, 0x4d, 0xb4, 0xe4, 0x1f,
+	0x64, 0x0f, 0xec, 0x99, 0x39, 0x8d, 0x91, 0x87, 0xfc, 0x8d, 0x78, 0x59, 0xd1, 0x2f, 0x04, 0xf8,
+	0x26, 0x59, 0x30, 0xb9, 0xd4, 0x5d, 0x03, 0x2e, 0xcc, 0x2c, 0xd7, 0x0f, 0x8b, 0x39, 0x33, 0xe2,
+	0xcd, 0xf0, 0x30, 0x68, 0xce, 0xaa, 0x19, 0x82, 0xe8, 0x4f, 0x1d, 0xd7, 0xad, 0x64, 0x07, 0xd6,
+	0xa7, 0x85, 0x6e, 0xdc, 0xf3, 0x90, 0xbf, 0x1d, 0x97, 0x05, 0x39, 0xaa, 0x38, 0xfa, 0x1e, 0xf2,
+	0x71, 0xb8, 0xdb, 0x8a, 0x2e, 0x53, 0x2b, 0xbc, 0x53, 0xc0, 0xb5, 0x01, 0x64, 0x0b, 0xf0, 0x65,
+	0xaa, 0xf9, 0x7b, 0xa2, 0xb9, 0xc8, 0x95, 0x63, 0x11, 0x0c, 0x83, 0x47, 0xc6, 0xb3, 0x57, 0xad,
+	0x1c, 0x44, 0x00, 0xec, 0x09, 0x4f, 0x14, 0x53, 0x4e, 0x8f, 0x1e, 0xc0, 0x30, 0x12, 0xf9, 0x0b,
+	0xcf, 0xde, 0xa4, 0x11, 0x17, 0x38, 0x8a, 0x7f, 0x32, 0x35, 0x46, 0x5e, 0xbf, 0xc0, 0x31, 0x05,
+	0xb5, 0x61, 0xed, 0x4e, 0xe4, 0x2c, 0xfc, 0xee, 0xc1, 0x30, 0x36, 0x20, 0x51, 0x09, 0x42, 0xae,
+	0xc0, 0xc9, 0x98, 0x6e, 0x66, 0x8c, 0x5a, 0xb0, 0x85, 0xd5, 0xdd, 0x6f, 0x35, 0x1b, 0x16, 0x6a,
+	0x91, 0x49, 0x83, 0xb9, 0x3b, 0xc3, 0xfd, 0x7f, 0xc1, 0xd4, 0x3a, 0x46, 0xe4, 0xfc, 0xf7, 0x66,
+	0xab, 0xd9, 0x2f, 0xaa, 0x2d, 0xad, 0xe6, 0x3f, 0x83, 0xc1, 0xbd, 0x64, 0xcf, 0x3c, 0xd5, 0xa4,
+	0xfb, 0xbd, 0xdc, 0xee, 0x36, 0xb5, 0x9e, 0x6c, 0xf3, 0x2f, 0x4f, 0x7e, 0x02, 0x00, 0x00, 0xff,
+	0xff, 0x89, 0xa6, 0x82, 0x97, 0xae, 0x02, 0x00, 0x00,
+}
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ context.Context
+var _ grpc.ClientConn
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the grpc package it is being compiled against.
+const _ = grpc.SupportPackageIsVersion4
+
+// RemoteControlClient is the client API for RemoteControl service.
+//
+// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
+type RemoteControlClient interface {
+	GetConfiguration(ctx context.Context, in *None, opts ...grpc.CallOption) (*Configuration, error)
+	Activations(ctx context.Context, in *None, opts ...grpc.CallOption) (RemoteControl_ActivationsClient, error)
+	Biases(ctx context.Context, in *None, opts ...grpc.CallOption) (RemoteControl_BiasesClient, error)
+	Weights(ctx context.Context, in *None, opts ...grpc.CallOption) (RemoteControl_WeightsClient, error)
+	Predict(ctx context.Context, in *Matrix, opts ...grpc.CallOption) (*Matrix, error)
+}
+
+type remoteControlClient struct {
+	cc *grpc.ClientConn
+}
+
+func NewRemoteControlClient(cc *grpc.ClientConn) RemoteControlClient {
+	return &remoteControlClient{cc}
+}
+
+func (c *remoteControlClient) GetConfiguration(ctx context.Context, in *None, opts ...grpc.CallOption) (*Configuration, error) {
+	out := new(Configuration)
+	err := c.cc.Invoke(ctx, "/remotecontrol.RemoteControl/getConfiguration", in, out, opts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
+
+func (c *remoteControlClient) Activations(ctx context.Context, in *None, opts ...grpc.CallOption) (RemoteControl_ActivationsClient, error) {
+	stream, err := c.cc.NewStream(ctx, &_RemoteControl_serviceDesc.Streams[0], "/remotecontrol.RemoteControl/Activations", opts...)
+	if err != nil {
+		return nil, err
+	}
+	x := &remoteControlActivationsClient{stream}
+	if err := x.ClientStream.SendMsg(in); err != nil {
+		return nil, err
+	}
+	if err := x.ClientStream.CloseSend(); err != nil {
+		return nil, err
+	}
+	return x, nil
+}
+
+type RemoteControl_ActivationsClient interface {
+	Recv() (*LayerMatrix, error)
+	grpc.ClientStream
+}
+
+type remoteControlActivationsClient struct {
+	grpc.ClientStream
+}
+
+func (x *remoteControlActivationsClient) Recv() (*LayerMatrix, error) {
+	m := new(LayerMatrix)
+	if err := x.ClientStream.RecvMsg(m); err != nil {
+		return nil, err
+	}
+	return m, nil
+}
+
+func (c *remoteControlClient) Biases(ctx context.Context, in *None, opts ...grpc.CallOption) (RemoteControl_BiasesClient, error) {
+	stream, err := c.cc.NewStream(ctx, &_RemoteControl_serviceDesc.Streams[1], "/remotecontrol.RemoteControl/Biases", opts...)
+	if err != nil {
+		return nil, err
+	}
+	x := &remoteControlBiasesClient{stream}
+	if err := x.ClientStream.SendMsg(in); err != nil {
+		return nil, err
+	}
+	if err := x.ClientStream.CloseSend(); err != nil {
+		return nil, err
+	}
+	return x, nil
+}
+
+type RemoteControl_BiasesClient interface {
+	Recv() (*LayerMatrix, error)
+	grpc.ClientStream
+}
+
+type remoteControlBiasesClient struct {
+	grpc.ClientStream
+}
+
+func (x *remoteControlBiasesClient) Recv() (*LayerMatrix, error) {
+	m := new(LayerMatrix)
+	if err := x.ClientStream.RecvMsg(m); err != nil {
+		return nil, err
+	}
+	return m, nil
+}
+
+func (c *remoteControlClient) Weights(ctx context.Context, in *None, opts ...grpc.CallOption) (RemoteControl_WeightsClient, error) {
+	stream, err := c.cc.NewStream(ctx, &_RemoteControl_serviceDesc.Streams[2], "/remotecontrol.RemoteControl/Weights", opts...)
+	if err != nil {
+		return nil, err
+	}
+	x := &remoteControlWeightsClient{stream}
+	if err := x.ClientStream.SendMsg(in); err != nil {
+		return nil, err
+	}
+	if err := x.ClientStream.CloseSend(); err != nil {
+		return nil, err
+	}
+	return x, nil
+}
+
+type RemoteControl_WeightsClient interface {
+	Recv() (*LayerMatrix, error)
+	grpc.ClientStream
+}
+
+type remoteControlWeightsClient struct {
+	grpc.ClientStream
+}
+
+func (x *remoteControlWeightsClient) Recv() (*LayerMatrix, error) {
+	m := new(LayerMatrix)
+	if err := x.ClientStream.RecvMsg(m); err != nil {
+		return nil, err
+	}
+	return m, nil
+}
+
+func (c *remoteControlClient) Predict(ctx context.Context, in *Matrix, opts ...grpc.CallOption) (*Matrix, error) {
+	out := new(Matrix)
+	err := c.cc.Invoke(ctx, "/remotecontrol.RemoteControl/Predict", in, out, opts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
+
+// RemoteControlServer is the server API for RemoteControl service.
+type RemoteControlServer interface {
+	GetConfiguration(context.Context, *None) (*Configuration, error)
+	Activations(*None, RemoteControl_ActivationsServer) error
+	Biases(*None, RemoteControl_BiasesServer) error
+	Weights(*None, RemoteControl_WeightsServer) error
+	Predict(context.Context, *Matrix) (*Matrix, error)
+}
+
+// UnimplementedRemoteControlServer can be embedded to have forward compatible implementations.
+type UnimplementedRemoteControlServer struct {
+}
+
+func (*UnimplementedRemoteControlServer) GetConfiguration(ctx context.Context, req *None) (*Configuration, error) {
+	return nil, status.Errorf(codes.Unimplemented, "method GetConfiguration not implemented")
+}
+func (*UnimplementedRemoteControlServer) Activations(req *None, srv RemoteControl_ActivationsServer) error {
+	return status.Errorf(codes.Unimplemented, "method Activations not implemented")
+}
+func (*UnimplementedRemoteControlServer) Biases(req *None, srv RemoteControl_BiasesServer) error {
+	return status.Errorf(codes.Unimplemented, "method Biases not implemented")
+}
+func (*UnimplementedRemoteControlServer) Weights(req *None, srv RemoteControl_WeightsServer) error {
+	return status.Errorf(codes.Unimplemented, "method Weights not implemented")
+}
+func (*UnimplementedRemoteControlServer) Predict(ctx context.Context, req *Matrix) (*Matrix, error) {
+	return nil, status.Errorf(codes.Unimplemented, "method Predict not implemented")
+}
+
+func RegisterRemoteControlServer(s *grpc.Server, srv RemoteControlServer) {
+	s.RegisterService(&_RemoteControl_serviceDesc, srv)
+}
+
+func _RemoteControl_GetConfiguration_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(None)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(RemoteControlServer).GetConfiguration(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server:     srv,
+		FullMethod: "/remotecontrol.RemoteControl/GetConfiguration",
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(RemoteControlServer).GetConfiguration(ctx, req.(*None))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+func _RemoteControl_Activations_Handler(srv interface{}, stream grpc.ServerStream) error {
+	m := new(None)
+	if err := stream.RecvMsg(m); err != nil {
+		return err
+	}
+	return srv.(RemoteControlServer).Activations(m, &remoteControlActivationsServer{stream})
+}
+
+type RemoteControl_ActivationsServer interface {
+	Send(*LayerMatrix) error
+	grpc.ServerStream
+}
+
+type remoteControlActivationsServer struct {
+	grpc.ServerStream
+}
+
+func (x *remoteControlActivationsServer) Send(m *LayerMatrix) error {
+	return x.ServerStream.SendMsg(m)
+}
+
+func _RemoteControl_Biases_Handler(srv interface{}, stream grpc.ServerStream) error {
+	m := new(None)
+	if err := stream.RecvMsg(m); err != nil {
+		return err
+	}
+	return srv.(RemoteControlServer).Biases(m, &remoteControlBiasesServer{stream})
+}
+
+type RemoteControl_BiasesServer interface {
+	Send(*LayerMatrix) error
+	grpc.ServerStream
+}
+
+type remoteControlBiasesServer struct {
+	grpc.ServerStream
+}
+
+func (x *remoteControlBiasesServer) Send(m *LayerMatrix) error {
+	return x.ServerStream.SendMsg(m)
+}
+
+func _RemoteControl_Weights_Handler(srv interface{}, stream grpc.ServerStream) error {
+	m := new(None)
+	if err := stream.RecvMsg(m); err != nil {
+		return err
+	}
+	return srv.(RemoteControlServer).Weights(m, &remoteControlWeightsServer{stream})
+}
+
+type RemoteControl_WeightsServer interface {
+	Send(*LayerMatrix) error
+	grpc.ServerStream
+}
+
+type remoteControlWeightsServer struct {
+	grpc.ServerStream
+}
+
+func (x *remoteControlWeightsServer) Send(m *LayerMatrix) error {
+	return x.ServerStream.SendMsg(m)
+}
+
+func _RemoteControl_Predict_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(Matrix)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(RemoteControlServer).Predict(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server:     srv,
+		FullMethod: "/remotecontrol.RemoteControl/Predict",
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(RemoteControlServer).Predict(ctx, req.(*Matrix))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+var _RemoteControl_serviceDesc = grpc.ServiceDesc{
+	ServiceName: "remotecontrol.RemoteControl",
+	HandlerType: (*RemoteControlServer)(nil),
+	Methods: []grpc.MethodDesc{
+		{
+			MethodName: "getConfiguration",
+			Handler:    _RemoteControl_GetConfiguration_Handler,
+		},
+		{
+			MethodName: "Predict",
+			Handler:    _RemoteControl_Predict_Handler,
+		},
+	},
+	Streams: []grpc.StreamDesc{
+		{
+			StreamName:    "Activations",
+			Handler:       _RemoteControl_Activations_Handler,
+			ServerStreams: true,
+		},
+		{
+			StreamName:    "Biases",
+			Handler:       _RemoteControl_Biases_Handler,
+			ServerStreams: true,
+		},
+		{
+			StreamName:    "Weights",
+			Handler:       _RemoteControl_Weights_Handler,
+			ServerStreams: true,
+		},
+	},
+	Metadata: "remotecontrol.proto",
+}

+ 58 - 0
neuralnetwork/remotecontrol/remotecontrol.proto

@@ -0,0 +1,58 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2019 Alexey Edelev <semlanik@gmail.com>
+ *
+ * This file is part of NeuralNetwork project https://git.semlanik.org/semlanik/NeuralNetwork
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of this
+ * software and associated documentation files (the "Software"), to deal in the Software
+ * without restriction, including without limitation the rights to use, copy, modify,
+ * merge, publish, distribute, sublicense, and/or sell copies of the Software, and
+ * to permit persons to whom the Software is furnished to do so, subject to the following
+ * conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all copies
+ * or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+ * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+ * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+ * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+ * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ */
+
+ syntax="proto3";
+
+package remotecontrol;
+
+message Matrix {
+    bytes matrix = 1;
+}
+
+message LayerMatrix {
+    enum ContentType {
+        Activations = 0;
+        Weights = 1;
+        Biases = 2;
+    };
+    ContentType contentType = 1;
+    sint32 layer = 2;
+    Matrix matrix = 3;
+}
+
+message Configuration {
+    repeated sint32 sizes = 1;
+}
+
+message None {
+}
+
+service RemoteControl {
+    rpc getConfiguration(None) returns (Configuration) {}
+    rpc Activations(None) returns (stream LayerMatrix) {}
+    rpc Biases(None) returns (stream LayerMatrix) {}
+    rpc Weights(None) returns (stream LayerMatrix) {}
+    rpc Predict(Matrix) returns (Matrix) {}
+}