diff --git a/DbNeuralNet.pro b/DbNeuralNet.pro new file mode 100644 index 0000000..0529f9f --- /dev/null +++ b/DbNeuralNet.pro @@ -0,0 +1,35 @@ +QT += core +QT -= gui widgets + +DBLIBS += + +TARGET = neuralnet + +PROJECT_ROOT = .. + +SOURCES += main.cpp \ + neuron.cpp \ + neurallayer.cpp \ + neuralnet.cpp \ + neuralfactor.cpp \ + debug.cpp + +HEADERS += \ + neuralfactor.h \ + interfaces/ineuronreceptor.h \ + interfaces/ineuronsignal.h \ + interfaces/ineuron.h \ + interfaces/ineurallayer.h \ + interfaces/ineuralnet.h \ + neuron.h \ + neurallayer.h \ + neuralnet.h \ + debug.h + +FORMS += + +RESOURCES += + +TRANSLATIONS += + +include($${PROJECT_ROOT}/app.pri) diff --git a/debug.cpp b/debug.cpp new file mode 100644 index 0000000..8386d8a --- /dev/null +++ b/debug.cpp @@ -0,0 +1,25 @@ +#include "debug.h" + +#include +#include +#include +#include + +QFile file("log.txt"); +QTextStream textStream(&file); + +bool initDebug() +{ + if(!file.open(QIODevice::Truncate | QIODevice::WriteOnly | QIODevice::Text)) + { + qWarning() << file.errorString(); + return false; + } + + return true; +} + +void debug(const QString &line) +{ + textStream << line << endl; +} diff --git a/debug.h b/debug.h new file mode 100644 index 0000000..38aa42e --- /dev/null +++ b/debug.h @@ -0,0 +1,9 @@ +#ifndef DEBUG_H +#define DEBUG_H + +class QString; + +bool initDebug(); +void debug(const QString &line); + +#endif // DEBUG_H diff --git a/interfaces/ineurallayer.h b/interfaces/ineurallayer.h new file mode 100644 index 0000000..8f944f7 --- /dev/null +++ b/interfaces/ineurallayer.h @@ -0,0 +1,21 @@ +#ifndef INEURALLAYER_H +#define INEURALLAYER_H + +#include + +class INeuron; +class INeuralNet; + +class INeuralLayer +{ +public: + virtual void pulse() = 0; + virtual void applyLearning() = 0; + virtual void initializeLearning() = 0; + + virtual INeuron *addNeuron(qreal bias) = 0; + virtual QList &neurons() = 0; + virtual const QList &neurons() const = 0; +}; + +#endif // INEURALLAYER_H diff --git 
a/interfaces/ineuralnet.h b/interfaces/ineuralnet.h new file mode 100644 index 0000000..ae11c4c --- /dev/null +++ b/interfaces/ineuralnet.h @@ -0,0 +1,23 @@ +#ifndef INEURALNET_H +#define INEURALNET_H + +#include + +class INeuralLayer; + +class INeuralNet +{ +public: + virtual void pulse() = 0; + virtual void applyLearning() = 0; + virtual void initializeLearning() = 0; + + virtual qreal learningRate() const = 0; + virtual void setLearningRate(qreal learningRate) = 0; + + virtual const INeuralLayer *inputLayer() const = 0; + virtual const INeuralLayer *hiddenLayer() const = 0; + virtual const INeuralLayer *outputLayer() const = 0; +}; + +#endif // INEURALNET_H diff --git a/interfaces/ineuron.h b/interfaces/ineuron.h new file mode 100644 index 0000000..ead0a76 --- /dev/null +++ b/interfaces/ineuron.h @@ -0,0 +1,24 @@ +#ifndef INEURON_H +#define INEURON_H + +#include "ineuronsignal.h" +#include "ineuronreceptor.h" + +class INeuralLayer; +class NeuralFactor; + +class INeuron : public INeuronSignal, public INeuronRepetor +{ +public: + virtual void pulse() = 0; + virtual void applyLearning(qreal learningRate) = 0; + virtual void initializeLearning() = 0; + + virtual qreal error() const = 0; + virtual void setError(qreal error) = 0; + + virtual NeuralFactor *bias() = 0; + virtual const NeuralFactor *bias() const = 0; +}; + +#endif // INEURON_H diff --git a/interfaces/ineuronreceptor.h b/interfaces/ineuronreceptor.h new file mode 100644 index 0000000..d1a2550 --- /dev/null +++ b/interfaces/ineuronreceptor.h @@ -0,0 +1,16 @@ +#ifndef INEURONRECEPTOR_H +#define INEURONRECEPTOR_H + +template class QMap; + +class INeuronSignal; +class NeuralFactor; + +class INeuronRepetor +{ +public: + virtual QMap &input() = 0; + virtual const QMap &input() const = 0; +}; + +#endif // INEURONRECEPTOR_H diff --git a/interfaces/ineuronsignal.h b/interfaces/ineuronsignal.h new file mode 100644 index 0000000..ae188b9 --- /dev/null +++ b/interfaces/ineuronsignal.h @@ -0,0 +1,13 @@ +#ifndef 
INEURONSIGNAL_H +#define INEURONSIGNAL_H + +#include + +class INeuronSignal +{ +public: + virtual qreal output() const = 0; + virtual void setOutput(qreal output) = 0; +}; + +#endif // INEURONSIGNAL_H diff --git a/main.cpp b/main.cpp new file mode 100644 index 0000000..71435d0 --- /dev/null +++ b/main.cpp @@ -0,0 +1,92 @@ +#include +#include +#include +#include +#include + +#include "debug.h" +#include "neuralnet.h" +#include "interfaces/ineurallayer.h" +#include "interfaces/ineuron.h" + +const qreal low = .01; +const qreal mid = .5; +const qreal high = .99; +const int iterations = 5; + +int main(int argc, char *argv[]) +{ + QCoreApplication a(argc, argv); + + if(!initDebug()) + return -1; + + QQueue random; + random.enqueue(0.248668584157093); + random.enqueue(0.110743977181029); + random.enqueue(0.467010679872246); + random.enqueue(0.771604122021982); + random.enqueue(0.657518893786482); + random.enqueue(0.432782601300991); + random.enqueue(0.354083763600366); + random.enqueue(0.943862276125635); + random.enqueue(0.101266453555444); + + NeuralNet net(2, 2, 1, random); + + QList > input { + QVector { high, high }, + QVector { low, high }, + QVector { high, low }, + QVector { low, low } + }; + + QList > output { + QVector { low }, + QVector { high }, + QVector { high }, + QVector { low } + }; + + int count = 0; + qreal hh, lh, hl, ll; + do + { + net.setLearningRate(3); + net.trainBackPropagation(input, output, iterations); + + net.inputLayer()->neurons().at(0)->setOutput(high); + net.inputLayer()->neurons().at(1)->setOutput(high); + net.pulse(); + hh = net.outputLayer()->neurons().at(0)->output(); + + net.inputLayer()->neurons().at(0)->setOutput(low); + net.inputLayer()->neurons().at(1)->setOutput(high); + net.pulse(); + lh = net.outputLayer()->neurons().at(0)->output(); + + net.inputLayer()->neurons().at(0)->setOutput(high); + net.inputLayer()->neurons().at(1)->setOutput(low); + net.pulse(); + hl = net.outputLayer()->neurons().at(0)->output(); + + 
net.inputLayer()->neurons().at(1)->setOutput(low); + net.pulse(); + ll = net.outputLayer()->neurons().at(0)->output(); + + debug(QString("round %0: hh=%1 lh=%2 hl=%3 ll=%4").arg(count++).arg(hh, 0, 'f', 8).arg(lh, 0, 'f', 8).arg(hl, 0, 'f', 8).arg(ll, 0, 'f', 8)); + } + while(hh > (mid + low) / 2.0 || + lh < (mid + high) / 2.0 || + hl < (mid + high) / 2.0 || + ll > (mid + low) / 2); + + qDebug() << count * iterations << "iterations"; + qDebug() << "hh" << hh; + qDebug() << "lh" << lh; + qDebug() << "hl" << hl; + qDebug() << "ll" << ll; + + return 0; +} diff --git a/neuralfactor.cpp b/neuralfactor.cpp new file mode 100644 index 0000000..e0d07bf --- /dev/null +++ b/neuralfactor.cpp @@ -0,0 +1,51 @@ +#include "neuralfactor.h" + +#include "debug.h" + +NeuralFactor::NeuralFactor(qreal weight, QObject *parent) : + QObject(parent), + m_weight(weight), + m_delta(0.0) +{ + debug(QString("NeuralFactor::NeuralFactor() %0").arg(weight, 0, 'f', 8)); +} + +void NeuralFactor::applyWeightChange(qreal learningRate) +{ + debug(QString("NeuralFactor::applyWeightChange() %0").arg(learningRate, 0, 'f', 8)); + + m_weight += m_delta * learningRate; +} + +void NeuralFactor::resetWeightChange() +{ + debug(QString("NeuralFactor::resetWeightChange()")); + + m_delta = 0.0; +} + +qreal NeuralFactor::weight() const +{ + return m_weight; +} + +void NeuralFactor::setWeight(qreal weight) +{ + debug(QString("NeuralFactor::setWeight() %0").arg(weight, 0, 'f', 8)); + + if(m_weight != weight) + Q_EMIT weightChanged(m_weight = weight); +} + +qreal NeuralFactor::delta() const +{ + return m_delta; +} + +void NeuralFactor::setDelta(qreal delta) +{ + debug(QString("NeuralFactor::setDelta() %0").arg(delta, 0, 'f', 8)); + + if(m_delta != delta) + Q_EMIT deltaChanged(m_delta = delta); +} diff --git a/neuralfactor.h b/neuralfactor.h new file mode 100644 index 0000000..3b2f04b --- /dev/null +++ b/neuralfactor.h @@ -0,0 +1,36 @@ +#ifndef NEURALFACTOR_H 
+#define NEURALFACTOR_H + +#include +#include + +class Neuron; + +class NeuralFactor : public QObject +{ + Q_OBJECT + Q_PROPERTY(qreal weight READ weight WRITE setWeight NOTIFY weightChanged) + Q_PROPERTY(qreal delta READ delta WRITE setDelta NOTIFY deltaChanged) + +public: + explicit NeuralFactor(qreal weight, QObject *parent = Q_NULLPTR); + + void applyWeightChange(qreal learningRate); + void resetWeightChange(); + + qreal weight() const; + void setWeight(qreal weight); + + qreal delta() const; + void setDelta(qreal delta); + +Q_SIGNALS: + void weightChanged(qreal weight); + void deltaChanged(qreal delta); + +private: + qreal m_weight; + qreal m_delta; +}; + +#endif // NEURALFACTOR_H diff --git a/neurallayer.cpp b/neurallayer.cpp new file mode 100644 index 0000000..bfaa054 --- /dev/null +++ b/neurallayer.cpp @@ -0,0 +1,61 @@ +#include "neurallayer.h" + +#include "neuralnet.h" +#include "debug.h" +#include "neuron.h" + +NeuralLayer::NeuralLayer(NeuralNet *neuralNet) : + QObject(neuralNet), + INeuralLayer(), + m_neuralNet(neuralNet), + m_neurons() +{ + debug(QString("NeuralLayer::NeuralLayer()")); +} + +void NeuralLayer::pulse() +{ + debug(QString("NeuralLayer::pulse()")); + + for(auto neuron : m_neurons) + neuron->pulse(); +} + +void NeuralLayer::applyLearning() +{ + debug(QString("NeuralLayer::applyLearning()")); + + qreal learningRate = m_neuralNet->learningRate(); + + debug(QString("NeuralLayer::applyLearning() learningRate = %0").arg(learningRate, 0, 'f', 8)); + + for(auto neuron : m_neurons) + neuron->applyLearning(learningRate); +} + +void NeuralLayer::initializeLearning() +{ + debug(QString("NeuralLayer::initializeLearning()")); + + for(auto neuron : m_neurons) + neuron->initializeLearning(); +} + +INeuron *NeuralLayer::addNeuron(qreal bias) +{ + debug(QString("NeuralLayer::addNeuron() %0").arg(bias, 0, 'f', 8)); + + auto neuron = new Neuron(bias, this); + m_neurons << neuron; + return neuron; +} + +QList &NeuralLayer::neurons() +{ + return m_neurons; +} + 
+const QList &NeuralLayer::neurons() const +{ + return m_neurons; +} diff --git a/neurallayer.h b/neurallayer.h new file mode 100644 index 0000000..f680c04 --- /dev/null +++ b/neurallayer.h @@ -0,0 +1,33 @@ +#ifndef NEURALLAYER_H +#define NEURALLAYER_H + +#include +#include + +#include "interfaces/ineurallayer.h" + +class NeuralNet; +class INeuron; + +class NeuralLayer : public QObject, public INeuralLayer +{ + Q_OBJECT + +public: + explicit NeuralLayer(NeuralNet *neuralNet); + + // INeuralLayer interface + void pulse() Q_DECL_OVERRIDE; + void applyLearning() Q_DECL_OVERRIDE; + void initializeLearning() Q_DECL_OVERRIDE; + + INeuron *addNeuron(qreal bias); + QList &neurons() Q_DECL_OVERRIDE; + const QList &neurons() const Q_DECL_OVERRIDE; + +private: + NeuralNet *m_neuralNet; + QList m_neurons; +}; + +#endif // NEURALLAYER_H diff --git a/neuralnet.cpp b/neuralnet.cpp new file mode 100644 index 0000000..9b42d3d --- /dev/null +++ b/neuralnet.cpp @@ -0,0 +1,215 @@ +#include "neuralnet.h" + +#include +#include +#include + +#include "neurallayer.h" +#include "debug.h" +#include "neuron.h" +#include "neuralfactor.h" + +NeuralNet::NeuralNet(int inputNeuronCount, int hiddenNeuronCount, int outputNeuronCount, QQueue &random, QObject *parent) : + QObject(parent), + INeuralNet(), + m_learningRate(0.5), + m_inputLayer(new NeuralLayer(this)), + m_hiddenLayer(new NeuralLayer(this)), + m_outputLayer(new NeuralLayer(this)) +{ + debug(QString("NeuralNet::NeuralNet() %0 %1 %2").arg(inputNeuronCount).arg(hiddenNeuronCount).arg(outputNeuronCount)); + + Q_ASSERT(random.count() == hiddenNeuronCount + outputNeuronCount + (hiddenNeuronCount * inputNeuronCount) + (outputNeuronCount * hiddenNeuronCount)); + + for (int i = 0; i < inputNeuronCount; i++) + m_inputLayer->addNeuron(0); + + for (int i = 0; i < outputNeuronCount; i++) + m_outputLayer->addNeuron(random.dequeue()); + + for (int i = 0; i < hiddenNeuronCount; i++) + m_hiddenLayer->addNeuron(random.dequeue()); + + for(auto hiddenNeuron 
: m_hiddenLayer->neurons()) + for(auto inputNeuron : m_inputLayer->neurons()) + hiddenNeuron->input().insert(inputNeuron, new NeuralFactor(random.dequeue())); + + for(auto outputNeuron : m_outputLayer->neurons()) + for(auto hiddenNeuron : m_hiddenLayer->neurons()) + outputNeuron->input().insert(hiddenNeuron, new NeuralFactor(random.dequeue())); +} + +void NeuralNet::trainBackPropagation(const QList > &inputs, const QList > &expexted, int iterations) +{ + { + QString line = "NeuralNet::trainBackPropagation()"; + for(auto vector : inputs) + for(auto val : vector) + line.append(QString(" %0").arg(val, 0, 'f', 8)); + for(auto vector : expexted) + for(auto val : vector) + line.append(QString(" %0").arg(val, 0, 'f', 8)); + line.append(QString(" %0").arg(iterations)); + debug(line); + } + + for(int i = 0; i < iterations; i++) + { + debug(QString("NeuralNet::trainBackPropagation() %0").arg(i)); + + // set all weight changes to zero + initializeLearning(); + + for(int j = 0; j < inputs.size(); j++) + { + preparePerceptionLayerForPulse(inputs.at(j)); + pulse(); + calculateErrors(expexted.at(j)); + calculateAndAppendTransformation(); + } + + applyLearning(); + } +} + +void NeuralNet::preparePerceptionLayerForPulse(const QVector &input) +{ + { + QString line = "NeuralNet::preparePerceptionLayerForPulse()"; + Q_FOREACH(auto val, input) + line.append(QString(" %0").arg(val, 0, 'f', 8)); + debug(line); + } + + Q_ASSERT(input.size() == m_inputLayer->neurons().size()); + + for(int i = 0; i < input.size(); i++) + { + debug(QString("NeuralNet::preparePerceptionLayerForPulse() loop %0").arg(i)); + m_inputLayer->neurons().at(i)->setOutput(input.at(i)); + } +} + +void NeuralNet::calculateErrors(const QVector &expected) +{ + { + QString line = "NeuralNet::calculateErrors()"; + Q_FOREACH(auto val, expected) + line.append(QString(" %0").arg(val, 0, 'f', 8)); + debug(line); + } + + Q_ASSERT(expected.size() == m_outputLayer->neurons().count()); + + // Calcualte output error values + for(int 
i = 0; i < expected.size(); i++) + { + debug(QString("NeuralNet::calculateErrors() loop %0").arg(i)); + + auto outputNode = m_outputLayer->neurons().at(i); + auto temp = outputNode->output(); + + outputNode->setError((expected.at(i) - temp) * sigmoidDerivative(temp)); + } + + // calculate hidden layer error values + for(auto hiddenNode : m_hiddenLayer->neurons()) + { + auto temp = hiddenNode->output(); + + qreal error = 0.0; + for(auto outputNode : m_outputLayer->neurons()) + error += (outputNode->error() * outputNode->input().value(hiddenNode)->weight()) * sigmoidDerivative(temp); + + hiddenNode->setError(error); + } +} + +void NeuralNet::calculateAndAppendTransformation() +{ + debug(QString("NeuralNet::calculateAndAppendTransformation()")); + + // adjust output layer weight change + for(auto outputNode : m_outputLayer->neurons()) + { + for(auto hiddenNode : m_hiddenLayer->neurons()) + { + auto factor = outputNode->input().value(hiddenNode); + factor->setDelta(factor->delta() + (outputNode->error() * hiddenNode->output())); + } + + outputNode->bias()->setDelta(outputNode->bias()->delta() + (outputNode->error() * outputNode->bias()->weight())); + } + + for(auto hiddenNode : m_hiddenLayer->neurons()) + { + for(auto inputNode : m_inputLayer->neurons()) + { + auto factor = hiddenNode->input().value(inputNode); + factor->setDelta(factor->delta() + (hiddenNode->error() * inputNode->output())); + } + + hiddenNode->bias()->setDelta(hiddenNode->bias()->delta() + (hiddenNode->error() * hiddenNode->bias()->weight())); + } +} + +void NeuralNet::pulse() +{ + debug("NeuralNet::pulse()"); + + m_hiddenLayer->pulse(); + m_outputLayer->pulse(); +} + +void NeuralNet::applyLearning() +{ + debug("NeuralNet::applyLearning()"); + + m_hiddenLayer->applyLearning(); + m_outputLayer->applyLearning(); +} + +void NeuralNet::initializeLearning() +{ + debug("NeuralNet::initializeLearning()"); + + m_hiddenLayer->initializeLearning(); + m_outputLayer->initializeLearning(); +} + +qreal 
NeuralNet::learningRate() const +{ + return m_learningRate; +} + +void NeuralNet::setLearningRate(qreal learningRate) +{ + debug(QString("NeuralNet::setLearningRate() %0").arg(learningRate, 0, 'f', 8)); + + if(m_learningRate != learningRate) + Q_EMIT learningRateChanged(m_learningRate = learningRate); +} + +const INeuralLayer *NeuralNet::inputLayer() const +{ + return m_inputLayer; +} + +const INeuralLayer *NeuralNet::hiddenLayer() const +{ + return m_hiddenLayer; +} + +const INeuralLayer *NeuralNet::outputLayer() const +{ + return m_outputLayer; +} + +qreal NeuralNet::sigmoidDerivative(qreal value) +{ + auto result = value * (1.0 - value); + + debug(QString("NeuralNet::sigmoidDerivative() %0 %1").arg(value, 0, 'f', 8).arg(result, 0, 'f', 8)); + + return result; +} diff --git a/neuralnet.h b/neuralnet.h new file mode 100644 index 0000000..e9babbb --- /dev/null +++ b/neuralnet.h @@ -0,0 +1,52 @@ +#ifndef NEURALNET_H +#define NEURALNET_H + +#include + +#include "interfaces/ineuralnet.h" + +template class QQueue; + +class INeuralLayer; + +class NeuralNet : public QObject, public INeuralNet +{ + Q_OBJECT + Q_PROPERTY(qreal learningRate READ learningRate WRITE setLearningRate NOTIFY learningRateChanged) + Q_PROPERTY(INeuralLayer inputLayer READ inputLayer) + Q_PROPERTY(INeuralLayer hiddenLayer READ hiddenLayer) + Q_PROPERTY(INeuralLayer outputLayer READ outputLayer) + +public: + explicit NeuralNet(int inputNeuronCount, int hiddenNeuronCount, int outputNeuronCount, QQueue &random, QObject *parent = Q_NULLPTR); + + void trainBackPropagation(const QList > &inputs, const QList > &expexted, int iterations); + void preparePerceptionLayerForPulse(const QVector &input); + void calculateErrors(const QVector &expected); + void calculateAndAppendTransformation(); + + // INeuralNet interface + void pulse() Q_DECL_OVERRIDE; + void applyLearning() Q_DECL_OVERRIDE; + void initializeLearning() Q_DECL_OVERRIDE; + + qreal learningRate() const Q_DECL_OVERRIDE; + void setLearningRate(qreal 
learningRate) Q_DECL_OVERRIDE; + + const INeuralLayer *inputLayer() const Q_DECL_OVERRIDE; + const INeuralLayer *hiddenLayer() const Q_DECL_OVERRIDE; + const INeuralLayer *outputLayer() const Q_DECL_OVERRIDE; + +Q_SIGNALS: + void learningRateChanged(qreal learningRate); + +private: + static qreal sigmoidDerivative(qreal value); + + qreal m_learningRate; + INeuralLayer *m_inputLayer; + INeuralLayer *m_hiddenLayer; + INeuralLayer *m_outputLayer; +}; + +#endif // NEURALNET_H diff --git a/neuron.cpp b/neuron.cpp new file mode 100644 index 0000000..fd21b13 --- /dev/null +++ b/neuron.cpp @@ -0,0 +1,106 @@ +#include "neuron.h" + +#include + +#include "neurallayer.h" +#include "neuralfactor.h" +#include "debug.h" + +Neuron::Neuron(qreal bias, NeuralLayer *neuralLayer) : + QObject(neuralLayer), + INeuron(), + m_neuralLayer(neuralLayer), + m_input(), + m_error(0.0), + m_bias(new NeuralFactor(bias, this)) +{ + debug(QString("Neuron::Neuron() %0").arg(bias, 0, 'f', 8)); +} + +QMap &Neuron::input() +{ + return m_input; +} + +const QMap &Neuron::input() const +{ + return m_input; +} + +qreal Neuron::output() const +{ + return m_output; +} + +void Neuron::setOutput(qreal output) +{ + debug(QString("Neuron::setOutput() %0").arg(output, 0, 'f', 8)); + + if(m_output != output) + Q_EMIT outputChanged(m_output = output); +} + +void Neuron::pulse() +{ + debug(QString("Neuron::pulse()")); + + m_output = 0.0; + + for(auto iter = m_input.constBegin(); iter != m_input.constEnd(); iter++) + m_output += iter.key()->output() * iter.value()->weight(); + + m_output += m_bias->weight(); + m_output = sigmoid(m_output); +} + +void Neuron::applyLearning(qreal learningRate) +{ + debug(QString("Neuron::applyLearning() %0").arg(learningRate, 0, 'f', 8)); + + for(auto factor : m_input.values()) + factor->applyWeightChange(learningRate); + + m_bias->applyWeightChange(learningRate); +} + +void Neuron::initializeLearning() +{ + debug(QString("Neuron::initializeLearning()")); + + for(auto factor : 
m_input.values()) + factor->resetWeightChange(); + + m_bias->resetWeightChange(); +} + +qreal Neuron::error() const +{ + return m_error; +} + +void Neuron::setError(qreal error) +{ + debug(QString("Neuron::setError() %0").arg(error, 0, 'f', 8)); + + if(m_error != error) + Q_EMIT errorChanged(m_error = error); +} + +NeuralFactor *Neuron::bias() +{ + return m_bias; +} + +const NeuralFactor *Neuron::bias() const +{ + return m_bias; +} + +qreal Neuron::sigmoid(qreal value) +{ + auto result = 1.0 / (1.0 + qExp(-value)); + + debug(QString("Neuron::sigmoid() %0 %1").arg(value, 0, 'f', 8).arg(result, 0, 'f', 8)); + + return result; +} diff --git a/neuron.h b/neuron.h new file mode 100644 index 0000000..d2b271e --- /dev/null +++ b/neuron.h @@ -0,0 +1,59 @@ +#ifndef NEURON_H +#define NEURON_H + +#include +#include +#include + +#include "interfaces/ineuron.h" + +class NeuralLayer; +class INeuronSignal; +class NeuralFactor; + +class Neuron : public QObject, public INeuron +{ + Q_OBJECT + Q_PROPERTY(QMap input READ input) + Q_PROPERTY(qreal output READ output WRITE setOutput NOTIFY outputChanged) + Q_PROPERTY(qreal error READ error WRITE setError NOTIFY errorChanged) + Q_PROPERTY(NeuralFactor* bias READ bias) + +public: + explicit Neuron(qreal bias, NeuralLayer *neuralLayer); + + // INeuronRepetor interface + QMap &input() Q_DECL_OVERRIDE; + const QMap &input() const Q_DECL_OVERRIDE; + + // INeuronSignal interface + qreal output() const Q_DECL_OVERRIDE; + void setOutput(qreal output) Q_DECL_OVERRIDE; + + // INeuron interface + void pulse() Q_DECL_OVERRIDE; + void applyLearning(qreal learningRate) Q_DECL_OVERRIDE; + void initializeLearning() Q_DECL_OVERRIDE; + + qreal error() const Q_DECL_OVERRIDE; + void setError(qreal error) Q_DECL_OVERRIDE; + + NeuralFactor *bias() Q_DECL_OVERRIDE; + const NeuralFactor *bias() const Q_DECL_OVERRIDE; + +Q_SIGNALS: + void outputChanged(qreal output); + void errorChanged(qreal error); + void lastErrorChanged(qreal lastError); + +private: + 
static inline qreal sigmoid(qreal value); + + NeuralLayer *m_neuralLayer; + QMap m_input; + qreal m_output = 0.0; + qreal m_error; + NeuralFactor *m_bias; +}; + +#endif // NEURON_H