Imported existing sources

This commit is contained in:
0xFEEDC0DE64
2018-09-15 20:51:15 +02:00
parent 09639315d6
commit 2a15afd678
17 changed files with 871 additions and 0 deletions

35
DbNeuralNet.pro Normal file
View File

@@ -0,0 +1,35 @@
# Console-only QtCore application; no GUI modules are linked.
QT += core
QT -= gui widgets
# No database libraries required by this sub-project.
DBLIBS +=
# Name of the produced executable.
TARGET = neuralnet
# Repository root, used to locate the shared qmake include below.
PROJECT_ROOT = ..
SOURCES += main.cpp \
neuron.cpp \
neurallayer.cpp \
neuralnet.cpp \
neuralfactor.cpp \
debug.cpp
HEADERS += \
neuralfactor.h \
interfaces/ineuronreceptor.h \
interfaces/ineuronsignal.h \
interfaces/ineuron.h \
interfaces/ineurallayer.h \
interfaces/ineuralnet.h \
neuron.h \
neurallayer.h \
neuralnet.h \
debug.h
# No forms, resources or translations in this sub-project.
FORMS +=
RESOURCES +=
TRANSLATIONS +=
# Shared build configuration for all apps in this repository.
include($${PROJECT_ROOT}/app.pri)

25
debug.cpp Normal file
View File

@@ -0,0 +1,25 @@
#include "debug.h"
#include <QFile>
#include <QTextStream>
#include <QDebug>
#include <QString>
// Shared log sink for this translation unit; opened by initDebug().
QFile file("log.txt");
// Text stream wrapping the log file; debug() writes through it.
QTextStream textStream(&file);
bool initDebug()
{
if(!file.open(QIODevice::Truncate | QIODevice::WriteOnly | QIODevice::Text))
{
qWarning() << file.errorString();
return false;
}
return true;
}
// Appends one line to the log and flushes immediately so output is not
// lost if the process aborts (QTextStream's endl == '\n' plus flush).
void debug(const QString &line)
{
    textStream << line << '\n';
    textStream.flush();
}

9
debug.h Normal file
View File

@@ -0,0 +1,9 @@
#ifndef DEBUG_H
#define DEBUG_H
class QString;
// Opens the shared log file ("log.txt") for writing, truncating previous
// contents. Returns false (after a qWarning) if the file cannot be opened.
bool initDebug();
// Appends one line to the shared log file; initDebug() must have succeeded first.
void debug(const QString &line);
#endif // DEBUG_H

21
interfaces/ineurallayer.h Normal file
View File

@@ -0,0 +1,21 @@
#ifndef INEURALLAYER_H
#define INEURALLAYER_H
#include <QList>
class INeuron;
class INeuralNet;
// Abstract interface for one layer of a neural net: an ordered collection
// of neurons plus forwarding of pulse/learning operations to them.
class INeuralLayer
{
public:
    // Virtual destructor so implementations can be destroyed safely
    // through a pointer to this interface.
    virtual ~INeuralLayer() = default;
    // Propagates signals through every neuron in the layer.
    virtual void pulse() = 0;
    // Applies the accumulated weight changes of every neuron.
    virtual void applyLearning() = 0;
    // Resets the accumulated weight changes of every neuron.
    virtual void initializeLearning() = 0;
    // Creates a neuron with the given bias, appends it and returns it.
    virtual INeuron *addNeuron(qreal bias) = 0;
    // Access to the neurons in insertion order.
    virtual QList<INeuron *> &neurons() = 0;
    virtual const QList<INeuron *> &neurons() const = 0;
};
#endif // INEURALLAYER_H

23
interfaces/ineuralnet.h Normal file
View File

@@ -0,0 +1,23 @@
#ifndef INEURALNET_H
#define INEURALNET_H
#include <qglobal.h>
class INeuralLayer;
// Abstract interface for a three-layer feed-forward neural network.
class INeuralNet
{
public:
    // Virtual destructor so implementations can be destroyed safely
    // through a pointer to this interface.
    virtual ~INeuralNet() = default;
    // Runs one forward pass through the network.
    virtual void pulse() = 0;
    // Applies the weight changes accumulated during training.
    virtual void applyLearning() = 0;
    // Resets all accumulated weight changes.
    virtual void initializeLearning() = 0;
    // Multiplier applied to accumulated deltas when learning is applied.
    virtual qreal learningRate() const = 0;
    virtual void setLearningRate(qreal learningRate) = 0;
    // Read-only access to the three layers.
    virtual const INeuralLayer *inputLayer() const = 0;
    virtual const INeuralLayer *hiddenLayer() const = 0;
    virtual const INeuralLayer *outputLayer() const = 0;
};
#endif // INEURALNET_H

24
interfaces/ineuron.h Normal file
View File

@@ -0,0 +1,24 @@
#ifndef INEURON_H
#define INEURON_H
#include "ineuronsignal.h"
#include "ineuronreceptor.h"
class INeuralLayer;
class NeuralFactor;
// Abstract interface for a full neuron: combines the output-signal and
// weighted-input interfaces and adds pulse/learning/error operations.
class INeuron : public INeuronSignal, public INeuronRepetor
{
public:
    // Virtual destructor so implementations can be destroyed safely
    // through a pointer to this interface.
    virtual ~INeuron() = default;
    // Recomputes the output from the weighted inputs and the bias.
    virtual void pulse() = 0;
    // Applies accumulated weight changes, scaled by learningRate.
    virtual void applyLearning(qreal learningRate) = 0;
    // Resets accumulated weight changes.
    virtual void initializeLearning() = 0;
    // Error term assigned during back-propagation.
    virtual qreal error() const = 0;
    virtual void setError(qreal error) = 0;
    // The neuron's bias factor, added to the weighted input sum.
    virtual NeuralFactor *bias() = 0;
    virtual const NeuralFactor *bias() const = 0;
};
#endif // INEURON_H

16
interfaces/ineuronreceptor.h Normal file
View File

@@ -0,0 +1,16 @@
#ifndef INEURONRECEPTOR_H
#define INEURONRECEPTOR_H
template <class Key, class T> class QMap;
class INeuronSignal;
class NeuralFactor;
// Abstract interface for the receiving side of a neuron: a map from each
// input signal to the weight factor applied to it.
// NOTE(review): "Repetor" looks like a typo for "Receptor", but the name is
// used consistently by the other headers, so it is kept unchanged here.
class INeuronRepetor
{
public:
    // Virtual destructor so implementations can be destroyed safely
    // through a pointer to this interface.
    virtual ~INeuronRepetor() = default;
    virtual QMap<INeuronSignal *, NeuralFactor *> &input() = 0;
    virtual const QMap<INeuronSignal *, NeuralFactor *> &input() const = 0;
};
#endif // INEURONRECEPTOR_H

13
interfaces/ineuronsignal.h Normal file
View File

@@ -0,0 +1,13 @@
#ifndef INEURONSIGNAL_H
#define INEURONSIGNAL_H
#include <qglobal.h>
// Abstract interface for the emitting side of a neuron: one scalar output
// value that downstream neurons read as their input.
class INeuronSignal
{
public:
    // Virtual destructor so implementations can be destroyed safely
    // through a pointer to this interface.
    virtual ~INeuronSignal() = default;
    virtual qreal output() const = 0;
    virtual void setOutput(qreal output) = 0;
};
#endif // INEURONSIGNAL_H

92
main.cpp Normal file
View File

@@ -0,0 +1,92 @@
#include <QCoreApplication>
#include <QQueue>
#include <QList>
#include <QVector>
#include <QDebug>
#include "debug.h"
#include "neuralnet.h"
#include "interfaces/ineurallayer.h"
#include "interfaces/ineuron.h"
// Activation levels used to encode booleans for the XOR training set
// (a sigmoid output never reaches exactly 0 or 1).
const qreal low = .01;
const qreal mid = .5;
const qreal high = .99;
// Number of back-propagation epochs per training round.
const int iterations = 5;
int main(int argc, char *argv[])
{
QCoreApplication a(argc, argv);
if(!initDebug())
return -1;
QQueue<qreal> random;
random.enqueue(0.248668584157093);
random.enqueue(0.110743977181029);
random.enqueue(0.467010679872246);
random.enqueue(0.771604122021982);
random.enqueue(0.657518893786482);
random.enqueue(0.432782601300991);
random.enqueue(0.354083763600366);
random.enqueue(0.943862276125635);
random.enqueue(0.101266453555444);
NeuralNet net(2, 2, 1, random);
QList<QVector<qreal> > input {
QVector<qreal> { high, high },
QVector<qreal> { low, high },
QVector<qreal> { high, low },
QVector<qreal> { low, low }
};
QList<QVector<qreal> > output {
QVector<qreal> { low },
QVector<qreal> { high },
QVector<qreal> { high },
QVector<qreal> { low }
};
int count = 0;
qreal hh, lh, hl, ll;
do
{
net.setLearningRate(3);
net.trainBackPropagation(input, output, iterations);
net.inputLayer()->neurons().at(0)->setOutput(high);
net.inputLayer()->neurons().at(1)->setOutput(high);
net.pulse();
hh = net.outputLayer()->neurons().at(0)->output();
net.inputLayer()->neurons().at(0)->setOutput(low);
net.inputLayer()->neurons().at(1)->setOutput(high);
net.pulse();
lh = net.outputLayer()->neurons().at(0)->output();
net.inputLayer()->neurons().at(0)->setOutput(high);
net.inputLayer()->neurons().at(1)->setOutput(low);
net.pulse();
hl = net.outputLayer()->neurons().at(0)->output();
net.inputLayer()->neurons().at(0)->setOutput(low);
net.inputLayer()->neurons().at(1)->setOutput(low);
net.pulse();
ll = net.outputLayer()->neurons().at(0)->output();
debug(QString("round %0: hh=%1 lh=%2 hl=%3 ll=%4").arg(count++).arg(hh, 0, 'f', 8).arg(lh, 0, 'f', 8).arg(hl, 0, 'f', 8).arg(ll, 0, 'f', 8));
}
while(hh > (mid + low) / 2.0 ||
lh < (mid + high) / 2.0 ||
hl < (mid + low) / 2.0 ||
ll > (mid + high) / 2);
qDebug() << count * iterations << "iterations";
qDebug() << "hh" << hh;
qDebug() << "lh" << lh;
qDebug() << "hl" << hl;
qDebug() << "ll" << ll;
return 0;
}

51
neuralfactor.cpp Normal file
View File

@@ -0,0 +1,51 @@
#include "neuralfactor.h"
#include "debug.h"
// Constructs a factor with the given initial weight and a zero pending delta.
NeuralFactor::NeuralFactor(qreal weight, QObject *parent) :
QObject(parent),
m_weight(weight),
m_delta(0.0)
{
debug(QString("NeuralFactor::NeuralFactor() %0").arg(weight, 0, 'f', 8));
}
// Folds the accumulated delta into the weight, scaled by the learning rate.
// The delta itself is left untouched; see resetWeightChange().
void NeuralFactor::applyWeightChange(qreal learningRate)
{
    debug(QString("NeuralFactor::applyWeightChange() %0").arg(learningRate, 0, 'f', 8));
    const qreal change = m_delta * learningRate;
    m_weight += change;
}
// Clears the pending weight change, ready for a new training batch.
void NeuralFactor::resetWeightChange()
{
    debug(QString("NeuralFactor::resetWeightChange()"));
    m_delta = 0;
}
// Current weight applied to the connected input signal.
qreal NeuralFactor::weight() const
{
    return m_weight;
}
// Sets the weight, emitting weightChanged() only on an actual change.
void NeuralFactor::setWeight(qreal weight)
{
    debug(QString("NeuralFactor::setWeight() %0").arg(weight, 0, 'f', 8));
    if(m_weight == weight)
        return;
    m_weight = weight;
    Q_EMIT weightChanged(m_weight);
}
// Accumulated (not yet applied) weight change.
qreal NeuralFactor::delta() const
{
    return m_delta;
}
// Sets the pending delta, emitting deltaChanged() only on an actual change.
void NeuralFactor::setDelta(qreal delta)
{
    debug(QString("NeuralFactor::setDelta() %0").arg(delta, 0, 'f', 8));
    if(m_delta == delta)
        return;
    m_delta = delta;
    Q_EMIT deltaChanged(m_delta);
}

36
neuralfactor.h Normal file
View File

@@ -0,0 +1,36 @@
#ifndef NEURALFACTOR_H
#define NEURALFACTOR_H
#include <QObject>
#include <qglobal.h>
class Neuron;
// A single trainable weight: the current value (weight) plus the change
// accumulated during a training batch (delta), folded into the weight by
// applyWeightChange(). Used for both input-connection weights and biases.
class NeuralFactor : public QObject
{
Q_OBJECT
Q_PROPERTY(qreal weight READ weight WRITE setWeight NOTIFY weightChanged)
Q_PROPERTY(qreal delta READ delta WRITE setDelta NOTIFY deltaChanged)
public:
explicit NeuralFactor(qreal weight, QObject *parent = Q_NULLPTR);
// Adds delta * learningRate to the weight.
void applyWeightChange(qreal learningRate);
// Zeroes the accumulated delta.
void resetWeightChange();
qreal weight() const;
void setWeight(qreal weight);
qreal delta() const;
void setDelta(qreal delta);
Q_SIGNALS:
void weightChanged(qreal weight);
void deltaChanged(qreal delta);
private:
qreal m_weight;
qreal m_delta;
};
#endif // NEURALFACTOR_H

61
neurallayer.cpp Normal file
View File

@@ -0,0 +1,61 @@
#include "neurallayer.h"
#include "neuralnet.h"
#include "debug.h"
#include "neuron.h"
// Constructs an empty layer owned (via QObject parenting) by the given net.
NeuralLayer::NeuralLayer(NeuralNet *neuralNet) :
QObject(neuralNet),
INeuralLayer(),
m_neuralNet(neuralNet),
m_neurons()
{
debug(QString("NeuralLayer::NeuralLayer()"));
}
void NeuralLayer::pulse()
{
debug(QString("NeuralLayer::pulse()"));
for(auto neuron : m_neurons)
neuron->pulse();
}
void NeuralLayer::applyLearning()
{
debug(QString("NeuralLayer::applyLearning()"));
qreal learningRate = m_neuralNet->learningRate();
debug(QString("NeuralLayer::applyLearning() learningRate = %0").arg(learningRate, 0, 'f', 8));
for(auto neuron : m_neurons)
neuron->applyLearning(learningRate);
}
void NeuralLayer::initializeLearning()
{
debug(QString("NeuralLayer::initializeLearning()"));
for(auto neuron : m_neurons)
neuron->initializeLearning();
}
// Creates a Neuron owned by this layer (via QObject parenting), registers
// it in the neuron list and returns it through the interface pointer.
INeuron *NeuralLayer::addNeuron(qreal bias)
{
    debug(QString("NeuralLayer::addNeuron() %0").arg(bias, 0, 'f', 8));
    Neuron *created = new Neuron(bias, this);
    m_neurons.append(created);
    return created;
}
// Mutable access to this layer's neurons, in insertion order.
QList<INeuron *> &NeuralLayer::neurons()
{
return m_neurons;
}
// Read-only access to this layer's neurons.
const QList<INeuron *> &NeuralLayer::neurons() const
{
return m_neurons;
}

33
neurallayer.h Normal file
View File

@@ -0,0 +1,33 @@
#ifndef NEURALLAYER_H
#define NEURALLAYER_H
#include <QObject>
#include <QList>
#include "interfaces/ineurallayer.h"
class NeuralNet;
class INeuron;
// One layer of a NeuralNet; owns its neurons through QObject parenting and
// forwards pulse/learning calls to them.
class NeuralLayer : public QObject, public INeuralLayer
{
    Q_OBJECT
public:
    explicit NeuralLayer(NeuralNet *neuralNet);
    // INeuralLayer interface
    void pulse() Q_DECL_OVERRIDE;
    void applyLearning() Q_DECL_OVERRIDE;
    void initializeLearning() Q_DECL_OVERRIDE;
    // FIX: Q_DECL_OVERRIDE added for consistency with the other interface
    // methods, so the compiler verifies the signature matches INeuralLayer.
    INeuron *addNeuron(qreal bias) Q_DECL_OVERRIDE;
    QList<INeuron *> &neurons() Q_DECL_OVERRIDE;
    const QList<INeuron *> &neurons() const Q_DECL_OVERRIDE;
private:
    NeuralNet *m_neuralNet;     // owning net (also the QObject parent)
    QList<INeuron *> m_neurons; // neurons in insertion order
};
#endif // NEURALLAYER_H

215
neuralnet.cpp Normal file
View File

@@ -0,0 +1,215 @@
#include "neuralnet.h"
#include <QQueue>
#include <QList>
#include <QVector>
#include "neurallayer.h"
#include "debug.h"
#include "neuron.h"
#include "neuralfactor.h"
// Builds a 3-layer net. `random` supplies the initial biases and weights and
// is consumed in a fixed (nonobvious) order: output-neuron biases first,
// then hidden-neuron biases, then input->hidden weights, then
// hidden->output weights. Input neurons get a zero bias: their outputs are
// set externally and never pulsed.
NeuralNet::NeuralNet(int inputNeuronCount, int hiddenNeuronCount, int outputNeuronCount, QQueue<qreal> &random, QObject *parent) :
QObject(parent),
INeuralNet(),
m_learningRate(0.5),
m_inputLayer(new NeuralLayer(this)),
m_hiddenLayer(new NeuralLayer(this)),
m_outputLayer(new NeuralLayer(this))
{
debug(QString("NeuralNet::NeuralNet() %0 %1 %2").arg(inputNeuronCount).arg(hiddenNeuronCount).arg(outputNeuronCount));
// The queue must hold exactly one bias per hidden/output neuron plus one
// weight per connection.
Q_ASSERT(random.count() == hiddenNeuronCount + outputNeuronCount + (hiddenNeuronCount * inputNeuronCount) + (outputNeuronCount * hiddenNeuronCount));
for (int i = 0; i < inputNeuronCount; i++)
m_inputLayer->addNeuron(0);
for (int i = 0; i < outputNeuronCount; i++)
m_outputLayer->addNeuron(random.dequeue());
for (int i = 0; i < hiddenNeuronCount; i++)
m_hiddenLayer->addNeuron(random.dequeue());
// Fully connect input -> hidden, then hidden -> output, one weight each.
for(auto hiddenNeuron : m_hiddenLayer->neurons())
for(auto inputNeuron : m_inputLayer->neurons())
hiddenNeuron->input().insert(inputNeuron, new NeuralFactor(random.dequeue()));
for(auto outputNeuron : m_outputLayer->neurons())
for(auto hiddenNeuron : m_hiddenLayer->neurons())
outputNeuron->input().insert(hiddenNeuron, new NeuralFactor(random.dequeue()));
}
void NeuralNet::trainBackPropagation(const QList<QVector<qreal> > &inputs, const QList<QVector<qreal> > &expexted, int iterations)
{
{
QString line = "NeuralNet::trainBackPropagation()";
for(auto vector : inputs)
for(auto val : vector)
line.append(QString(" %0").arg(val, 0, 'f', 8));
for(auto vector : expexted)
for(auto val : vector)
line.append(QString(" %0").arg(val, 0, 'f', 8));
line.append(QString(" %0").arg(iterations));
debug(line);
}
for(int i = 0; i < iterations; i++)
{
debug(QString("NeuralNet::trainBackPropagation() %0").arg(i));
// set all weight changes to zero
initializeLearning();
for(int j = 0; j < inputs.size(); j++)
{
preparePerceptionLayerForPulse(inputs.at(j));
pulse();
calculateErrors(expexted.at(j));
calculateAndAppendTransformation();
}
applyLearning();
}
}
// Copies one training sample into the input layer's neuron outputs,
// position by position.
void NeuralNet::preparePerceptionLayerForPulse(const QVector<qreal> &input)
{
    {
        QString line = "NeuralNet::preparePerceptionLayerForPulse()";
        for(auto val : input)
            line.append(QString(" %0").arg(val, 0, 'f', 8));
        debug(line);
    }
    Q_ASSERT(input.size() == m_inputLayer->neurons().size());
    int index = 0;
    for(auto val : input)
    {
        debug(QString("NeuralNet::preparePerceptionLayerForPulse() loop %0").arg(index));
        m_inputLayer->neurons().at(index)->setOutput(val);
        index++;
    }
}
// Computes back-propagation error terms, output layer first, then hidden.
// `expected` holds the desired output of each output neuron, in order.
void NeuralNet::calculateErrors(const QVector<qreal> &expected)
{
{
QString line = "NeuralNet::calculateErrors()";
Q_FOREACH(auto val, expected)
line.append(QString(" %0").arg(val, 0, 'f', 8));
debug(line);
}
Q_ASSERT(expected.size() == m_outputLayer->neurons().count());
// Calculate output error values: error = (target - actual) * f'(actual).
for(int i = 0; i < expected.size(); i++)
{
debug(QString("NeuralNet::calculateErrors() loop %0").arg(i));
auto outputNode = m_outputLayer->neurons().at(i);
auto temp = outputNode->output();
outputNode->setError((expected.at(i) - temp) * sigmoidDerivative(temp));
}
// calculate hidden layer error values: sum of downstream output errors
// weighted by the connecting factor, each term scaled by this node's
// activation derivative (constant per node, so equivalent to factoring
// it out of the sum — apart from extra debug lines from each call).
for(auto hiddenNode : m_hiddenLayer->neurons())
{
auto temp = hiddenNode->output();
qreal error = 0.0;
for(auto outputNode : m_outputLayer->neurons())
error += (outputNode->error() * outputNode->input().value(hiddenNode)->weight()) * sigmoidDerivative(temp);
hiddenNode->setError(error);
}
}
// Accumulates the weight-change deltas for the current sample into every
// factor (connection weights and biases); applyLearning() commits them.
// NOTE(review): the bias delta is accumulated as error * bias->weight(),
// whereas textbook back-propagation uses just the error (bias input fixed
// at 1). Looks suspicious — confirm this is intended.
void NeuralNet::calculateAndAppendTransformation()
{
debug(QString("NeuralNet::calculateAndAppendTransformation()"));
// adjust output layer weight change
for(auto outputNode : m_outputLayer->neurons())
{
for(auto hiddenNode : m_hiddenLayer->neurons())
{
auto factor = outputNode->input().value(hiddenNode);
factor->setDelta(factor->delta() + (outputNode->error() * hiddenNode->output()));
}
outputNode->bias()->setDelta(outputNode->bias()->delta() + (outputNode->error() * outputNode->bias()->weight()));
}
// same for the hidden layer, fed by the input-layer outputs
for(auto hiddenNode : m_hiddenLayer->neurons())
{
for(auto inputNode : m_inputLayer->neurons())
{
auto factor = hiddenNode->input().value(inputNode);
factor->setDelta(factor->delta() + (hiddenNode->error() * inputNode->output()));
}
hiddenNode->bias()->setDelta(hiddenNode->bias()->delta() + (hiddenNode->error() * hiddenNode->bias()->weight()));
}
}
// Forward pass. Input-layer outputs are set externally, so only the hidden
// and output layers pulse — and in that order, since output neurons read
// the hidden neurons' fresh outputs.
void NeuralNet::pulse()
{
debug("NeuralNet::pulse()");
m_hiddenLayer->pulse();
m_outputLayer->pulse();
}
// Commits the accumulated weight changes of the two trainable layers
// (the input layer has no incoming weights to adjust).
void NeuralNet::applyLearning()
{
debug("NeuralNet::applyLearning()");
m_hiddenLayer->applyLearning();
m_outputLayer->applyLearning();
}
// Zeroes the accumulated weight changes of the two trainable layers,
// called at the start of every training epoch.
void NeuralNet::initializeLearning()
{
debug("NeuralNet::initializeLearning()");
m_hiddenLayer->initializeLearning();
m_outputLayer->initializeLearning();
}
// Multiplier applied to accumulated deltas when learning is committed.
qreal NeuralNet::learningRate() const
{
    return m_learningRate;
}
// Sets the learning rate, emitting learningRateChanged() only on change.
void NeuralNet::setLearningRate(qreal learningRate)
{
    debug(QString("NeuralNet::setLearningRate() %0").arg(learningRate, 0, 'f', 8));
    if(m_learningRate == learningRate)
        return;
    m_learningRate = learningRate;
    Q_EMIT learningRateChanged(m_learningRate);
}
// The layer whose neuron outputs are set externally before a pulse.
const INeuralLayer *NeuralNet::inputLayer() const
{
return m_inputLayer;
}
// The intermediate layer between input and output.
const INeuralLayer *NeuralNet::hiddenLayer() const
{
return m_hiddenLayer;
}
// The layer whose neuron outputs form the network's result.
const INeuralLayer *NeuralNet::outputLayer() const
{
return m_outputLayer;
}
// Derivative of the logistic sigmoid expressed via its output:
// f'(x) = f(x) * (1 - f(x)); `value` is expected to already be f(x).
qreal NeuralNet::sigmoidDerivative(qreal value)
{
    const qreal result = value * (1.0 - value);
    debug(QString("NeuralNet::sigmoidDerivative() %0 %1").arg(value, 0, 'f', 8).arg(result, 0, 'f', 8));
    return result;
}

52
neuralnet.h Normal file
View File

@@ -0,0 +1,52 @@
#ifndef NEURALNET_H
#define NEURALNET_H
#include <QObject>
#include "interfaces/ineuralnet.h"
template <class T> class QQueue;
// Forward declarations so this header does not rely on transitive includes
// for the container types used in trainBackPropagation().
template <class T> class QList;
template <class T> class QVector;
class INeuralLayer;
// A fixed three-layer (input/hidden/output) feed-forward neural network
// trained with batch back-propagation.
class NeuralNet : public QObject, public INeuralNet
{
    Q_OBJECT
    Q_PROPERTY(qreal learningRate READ learningRate WRITE setLearningRate NOTIFY learningRateChanged)
    // FIX: property types now match the getters, which return
    // const INeuralLayer* (the original declared the bare value type
    // INeuralLayer, which cannot be read from a pointer getter). The layers
    // are created once in the constructor and never replaced, hence CONSTANT.
    Q_PROPERTY(const INeuralLayer* inputLayer READ inputLayer CONSTANT)
    Q_PROPERTY(const INeuralLayer* hiddenLayer READ hiddenLayer CONSTANT)
    Q_PROPERTY(const INeuralLayer* outputLayer READ outputLayer CONSTANT)
public:
    // `random` supplies all initial biases/weights and is consumed by the
    // constructor; see neuralnet.cpp for the exact order.
    explicit NeuralNet(int inputNeuronCount, int hiddenNeuronCount, int outputNeuronCount, QQueue<qreal> &random, QObject *parent = Q_NULLPTR);
    // Runs `iterations` epochs of batch back-propagation over the samples.
    void trainBackPropagation(const QList<QVector<qreal> > &inputs, const QList<QVector<qreal> > &expexted, int iterations);
    // Copies one sample into the input layer's neuron outputs.
    void preparePerceptionLayerForPulse(const QVector<qreal> &input);
    // Computes error terms for the output and hidden layers.
    void calculateErrors(const QVector<qreal> &expected);
    // Accumulates per-sample weight-change deltas into every factor.
    void calculateAndAppendTransformation();
    // INeuralNet interface
    void pulse() Q_DECL_OVERRIDE;
    void applyLearning() Q_DECL_OVERRIDE;
    void initializeLearning() Q_DECL_OVERRIDE;
    qreal learningRate() const Q_DECL_OVERRIDE;
    void setLearningRate(qreal learningRate) Q_DECL_OVERRIDE;
    const INeuralLayer *inputLayer() const Q_DECL_OVERRIDE;
    const INeuralLayer *hiddenLayer() const Q_DECL_OVERRIDE;
    const INeuralLayer *outputLayer() const Q_DECL_OVERRIDE;
Q_SIGNALS:
    void learningRateChanged(qreal learningRate);
private:
    // Derivative of the logistic sigmoid, expressed via its output value.
    static qreal sigmoidDerivative(qreal value);
    qreal m_learningRate;
    INeuralLayer *m_inputLayer;
    INeuralLayer *m_hiddenLayer;
    INeuralLayer *m_outputLayer;
};
#endif // NEURALNET_H

106
neuron.cpp Normal file
View File

@@ -0,0 +1,106 @@
#include "neuron.h"
#include <qmath.h>
#include "neurallayer.h"
#include "neuralfactor.h"
#include "debug.h"
// Constructs a neuron owned (via QObject parenting) by its layer, with the
// given initial bias weight, no inputs and a zeroed output and error.
Neuron::Neuron(qreal bias, NeuralLayer *neuralLayer) :
    QObject(neuralLayer),
    INeuron(),
    m_neuralLayer(neuralLayer),
    m_input(),
    // FIX: m_output was left uninitialized, so output() and the comparison
    // in setOutput() read an indeterminate value before the first pulse.
    m_output(0.0),
    m_error(0.0),
    m_bias(new NeuralFactor(bias, this))
{
    debug(QString("Neuron::Neuron() %0").arg(bias, 0, 'f', 8));
}
// Mutable access to the map from input signal to its weight factor.
QMap<INeuronSignal *, NeuralFactor *> &Neuron::input()
{
return m_input;
}
// Read-only access to the input-signal -> weight-factor map.
const QMap<INeuronSignal *, NeuralFactor *> &Neuron::input() const
{
return m_input;
}
// Current output (activation) value of this neuron.
qreal Neuron::output() const
{
return m_output;
}
// Sets the output directly (used for input-layer neurons), emitting
// outputChanged() only when the value actually changes.
void Neuron::setOutput(qreal output)
{
    debug(QString("Neuron::setOutput() %0").arg(output, 0, 'f', 8));
    if(m_output == output)
        return;
    m_output = output;
    Q_EMIT outputChanged(m_output);
}
void Neuron::pulse()
{
debug(QString("Neuron::pulse()"));
m_output = 0.0;
for(auto iter = m_input.constBegin(); iter != m_input.constEnd(); iter++)
m_output += iter.key()->output() * iter.value()->weight();
m_output += m_bias->weight();
m_output = sigmoid(m_output);
}
// Commits the accumulated weight change of every input factor and of the
// bias, scaled by the learning rate.
void Neuron::applyLearning(qreal learningRate)
{
    debug(QString("Neuron::applyLearning() %0").arg(learningRate, 0, 'f', 8));
    for(auto iter = m_input.constBegin(); iter != m_input.constEnd(); ++iter)
        iter.value()->applyWeightChange(learningRate);
    m_bias->applyWeightChange(learningRate);
}
// Zeroes the accumulated weight change of every input factor and the bias.
void Neuron::initializeLearning()
{
    debug(QString("Neuron::initializeLearning()"));
    for(auto iter = m_input.constBegin(); iter != m_input.constEnd(); ++iter)
        iter.value()->resetWeightChange();
    m_bias->resetWeightChange();
}
// Back-propagation error term for this neuron.
qreal Neuron::error() const
{
    return m_error;
}
// Stores the error term, emitting errorChanged() only on an actual change.
void Neuron::setError(qreal error)
{
    debug(QString("Neuron::setError() %0").arg(error, 0, 'f', 8));
    if(m_error == error)
        return;
    m_error = error;
    Q_EMIT errorChanged(m_error);
}
// Mutable access to the bias factor (owned by this neuron via parenting).
NeuralFactor *Neuron::bias()
{
return m_bias;
}
// Read-only access to the bias factor.
const NeuralFactor *Neuron::bias() const
{
return m_bias;
}
// Logistic sigmoid 1 / (1 + e^-x), mapping any real into (0, 1).
qreal Neuron::sigmoid(qreal value)
{
    const qreal result = 1.0 / (qExp(-value) + 1.0);
    debug(QString("Neuron::sigmoid() %0 %1").arg(value, 0, 'f', 8).arg(result, 0, 'f', 8));
    return result;
}

59
neuron.h Normal file
View File

@@ -0,0 +1,59 @@
#ifndef NEURON_H
#define NEURON_H
#include <QObject>
#include <QMap>
#include <qglobal.h>
#include "interfaces/ineuron.h"
class NeuralLayer;
class INeuronSignal;
class NeuralFactor;
// Concrete neuron: output = sigmoid(sum(input_i * weight_i) + bias weight).
// Owned by its NeuralLayer through QObject parenting.
class Neuron : public QObject, public INeuron
{
Q_OBJECT
Q_PROPERTY(QMap<INeuronSignal*,NeuralFactor*> input READ input)
Q_PROPERTY(qreal output READ output WRITE setOutput NOTIFY outputChanged)
Q_PROPERTY(qreal error READ error WRITE setError NOTIFY errorChanged)
Q_PROPERTY(NeuralFactor* bias READ bias)
public:
explicit Neuron(qreal bias, NeuralLayer *neuralLayer);
// INeuronRepetor interface
QMap<INeuronSignal *, NeuralFactor *> &input() Q_DECL_OVERRIDE;
const QMap<INeuronSignal *, NeuralFactor *> &input() const Q_DECL_OVERRIDE;
// INeuronSignal interface
qreal output() const Q_DECL_OVERRIDE;
void setOutput(qreal output) Q_DECL_OVERRIDE;
// INeuron interface
void pulse() Q_DECL_OVERRIDE;
void applyLearning(qreal learningRate) Q_DECL_OVERRIDE;
void initializeLearning() Q_DECL_OVERRIDE;
qreal error() const Q_DECL_OVERRIDE;
void setError(qreal error) Q_DECL_OVERRIDE;
NeuralFactor *bias() Q_DECL_OVERRIDE;
const NeuralFactor *bias() const Q_DECL_OVERRIDE;
Q_SIGNALS:
void outputChanged(qreal output);
void errorChanged(qreal error);
// NOTE(review): lastErrorChanged is declared but never emitted anywhere in
// this project's sources — confirm whether it is still needed.
void lastErrorChanged(qreal lastError);
private:
// Logistic activation function; defined in neuron.cpp.
static inline qreal sigmoid(qreal value);
NeuralLayer *m_neuralLayer;
QMap<INeuronSignal *, NeuralFactor *> m_input;
qreal m_output;
qreal m_error;
NeuralFactor *m_bias;
};
#endif // NEURON_H