main.go

/*
 * MIT License
 *
 * Copyright (c) 2019 Alexey Edelev <semlanik@gmail.com>
 *
 * This file is part of NeuralNetwork project https://git.semlanik.org/semlanik/NeuralNetwork
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this
 * software and associated documentation files (the "Software"), to deal in the Software
 * without restriction, including without limitation the rights to use, copy, modify,
 * merge, publish, distribute, sublicense, and/or sell copies of the Software, and
 * to permit persons to whom the Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies
 * or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
package main

import (
	"git.semlanik.org/semlanik/NeuralNetwork/neuralnetwork"
	"git.semlanik.org/semlanik/NeuralNetwork/neuralnetwork/gradients"
	"git.semlanik.org/semlanik/NeuralNetwork/remotecontrol"
)

func main() {
	// Create the remote control that will be registered as the network's state watcher.
	rc := remotecontrol.NewRemoteControl()

	// Network topology: 13 inputs, hidden layers of 8 and 12 neurons, 3 outputs.
	sizes := []int{13, 8, 12, 3}

	// Build the network with an RProp (resilient backpropagation) gradient
	// initializer; NuPlus/NuMinus are the step increase/decrease factors,
	// DeltaMax/DeltaMin bound the step size. The construction error is ignored here.
	nn, _ := neuralnetwork.NewNeuralNetwork(sizes, gradients.NewRPropInitializer(gradients.RPropConfig{
		NuPlus:   1.2,
		NuMinus:  0.5,
		DeltaMax: 50.0,
		DeltaMin: 0.000001,
	}))

	nn.SetStateWatcher(rc)
	rc.Run()
	// Restore a previously saved network state from disk:
	// inFile, err := os.Open("./networkstate")
	// if err != nil {
	// 	log.Fatal(err)
	// }
	// defer inFile.Close()
	// nn.LoadState(inFile)

	// Alternative: plain backpropagation initializer with a 0.1 learning rate:
	// nn, _ := neuralnetwork.NewNeuralNetwork(sizes, neuralnetwork.NewBackPropInitializer(0.1))

	// Dump per-layer weights, biases and activations:
	// for i := 0; i < nn.Count; i++ {
	// 	if i > 0 {
	// 		fmt.Printf("Weights before:\n%v\n\n", mat.Formatted(nn.Weights[i], mat.Prefix(""), mat.Excerpt(0)))
	// 		fmt.Printf("Biases before:\n%v\n\n", mat.Formatted(nn.Biases[i], mat.Prefix(""), mat.Excerpt(0)))
	// 		fmt.Printf("Z before:\n%v\n\n", mat.Formatted(nn.Z[i], mat.Prefix(""), mat.Excerpt(0)))
	// 	}
	// 	fmt.Printf("A before:\n%v\n\n", mat.Formatted(nn.A[i], mat.Prefix(""), mat.Excerpt(0)))
	// }

	// Load a saved state and count mispredictions on the validation part of the training set:
	// nn = &neuralnetwork.NeuralNetwork{}
	// inFile, err := os.Open("./data")
	// if err != nil {
	// 	log.Fatal(err)
	// }
	// defer inFile.Close()
	// nn.LoadState(inFile)
	// inFile.Close()
	// failCount = 0
	// training.Reset()
	// for training.NextValidator() {
	// 	dataSet, expect := training.GetValidator()
	// 	index, _ := nn.Predict(dataSet)
	// 	if expect.At(index, 0) != 1.0 {
	// 		failCount++
	// 		// fmt.Printf("Fail: %v, %v\n\n", training.ValidationIndex(), expect.At(index, 0))
	// 	}
	// }
	// fmt.Printf("Fail count: %v\n\n", failCount)
}