```
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include<iostream>
using namespace std;
#define INPUT_NODES 4
// NOTE(review): "3" below each define looks like an earlier layer size tried during tuning
#define HIDDEN_NODES 5 // hidden-layer width
#define OUTPUT_NODES 3
// NOTE(review): 4 looks like an earlier output count (see the commented-out 0.8 target in main)
#define LEARNING_RATE 0.1
// Network state lives in file-scope globals: one fixed 4-5-3 fully connected net.
double inputLayer[INPUT_NODES];    // current input vector
double hiddenLayer[HIDDEN_NODES];  // hidden activations (post-sigmoid)
double outputLayer[OUTPUT_NODES];  // output activations (post-sigmoid)
double inputToHiddenWeights[INPUT_NODES][HIDDEN_NODES];
double hiddenToOutputWeights[HIDDEN_NODES][OUTPUT_NODES];
double hiddenBias[HIDDEN_NODES];   // static storage => zero-initialized
double outputBias[OUTPUT_NODES];   // static storage => zero-initialized
// Activation functions
// Logistic sigmoid, float overload: maps any x to (0, 1).
double sigmoid(float x) {
    const double denom = 1.0 + exp(-x);
    return 1.0 / denom;
}
// Logistic sigmoid, double overload: maps any x to (0, 1).
double sigmoid(double x) {
    const double denom = 1.0 + exp(-x);
    return 1.0 / denom;
}
// Sigmoid derivative, float overload. Expects x to be an already
// activated value y = sigmoid(net), since sigmoid'(net) = y * (1 - y).
double dSigmoid(float x) {
    const double complement = 1.0 - x;
    return x * complement;
}
// Sigmoid derivative, double overload. Expects x to be an already
// activated value y = sigmoid(net), since sigmoid'(net) = y * (1 - y).
double dSigmoid(double x) {
    const double complement = 1.0 - x;
    return x * complement;
}
// Duplicate of dSigmoid(double); kept for interface compatibility.
// NOTE(review): appears unused in this file — candidate for removal
// once callers are confirmed.
double dSigmoid_ld(double x) {
    const double complement = 1.0 - x;
    return x * complement;
}
// Returns one pseudo-random weight uniformly distributed in [-1, 1].
static double randomWeight() {
    return ((double)rand() / RAND_MAX) * 2 - 1;
}

// Randomize both weight matrices; biases keep their zero static
// initialization. rand() is unseeded, so the sequence is reproducible.
void initializeWeights() {
    for (int row = 0; row < INPUT_NODES; row++) {
        for (int col = 0; col < HIDDEN_NODES; col++) {
            inputToHiddenWeights[row][col] = randomWeight();
        }
    }
    for (int row = 0; row < HIDDEN_NODES; row++) {
        for (int col = 0; col < OUTPUT_NODES; col++) {
            hiddenToOutputWeights[row][col] = randomWeight();
        }
    }
}
// One forward pass: inputLayer -> hiddenLayer -> outputLayer.
// Every unit computes sigmoid(weighted sum + bias); activations are
// written into the global layer arrays.
void forwardPropagation() {
    // Input -> hidden.
    for (int h = 0; h < HIDDEN_NODES; h++) {
        double net = 0;
        for (int i = 0; i < INPUT_NODES; i++) {
            net += inputLayer[i] * inputToHiddenWeights[i][h];
        }
        net += hiddenBias[h];
        hiddenLayer[h] = sigmoid(net);
    }
    // Hidden -> output.
    for (int o = 0; o < OUTPUT_NODES; o++) {
        double net = 0;
        for (int h = 0; h < HIDDEN_NODES; h++) {
            net += hiddenLayer[h] * hiddenToOutputWeights[h][o];
        }
        net += outputBias[o];
        outputLayer[o] = sigmoid(net);
    }
}
void backwardPropagation(double target[OUTPUT_NODES]) {
double outputErrors[OUTPUT_NODES];
double hiddenErrors[HIDDEN_NODES];
//计算损失Calculate output errors
for (int k = 0; k < OUTPUT_NODES; k++) {
outputErrors[k] = target[k] - outputLayer[k];
}
//算隐藏层的损失Calculate hidden layer errors
for (int j = 0; j < HIDDEN_NODES; j++) {//for220j
hiddenErrors[j] = 0;
for (int k = 0; k < OUTPUT_NODES; k++) {//for3300k
hiddenErrors[j] += outputErrors[k] * hiddenToOutputWeights[j][k];
}//for3300k
}//for220j
//更新隐藏层的权重Update hidden to output weights
for (int j = 0; j < HIDDEN_NODES; j++) {//for440j
for (int k = 0; k < OUTPUT_NODES; k++) {//for5500k
hiddenToOutputWeights[j][k] += LEARNING_RATE * outputErrors[k] * dSigmoid(outputLayer[k]) * hiddenLayer[j];
}//for5500k
}//for440j
//更新输入层权重Update input to hidden weights
for (int i = 0; i < INPUT_NODES; i++) {//for660i
for (int j = 0; j < HIDDEN_NODES; j++) {
inputToHiddenWeights[i][j] += LEARNING_RATE * hiddenErrors[j] * dSigmoid(hiddenLayer[j]) * inputLayer[i];
}//for7700j
}//for660i
}//void backwardPropagation(double target[OUTPUT_NODES])
// Trains the 4-5-3 network on a single hard-coded example, then prints
// the outputs for a nearby test input.
// FIX: removed the leftover debug line `cout << RAND_MAX << endl;`
// which polluted the program's output.
int main() {
    initializeWeights(); // rand() is unseeded, so runs are reproducible

    // Single training example (4 inputs -> 3 targets).
    inputLayer[0] = 0.5;
    inputLayer[1] = 0.25;
    inputLayer[2] = -0.75;
    inputLayer[3] = 0;
    double target[OUTPUT_NODES] = { 0.1, 0.9, 0.2 }; //, 0.8}; // training data

    // Train for 10000 epochs on that one example.
    for (int epoch = 0; epoch < 10000; epoch++) {
        forwardPropagation();
        backwardPropagation(target);
    }

    // Test input close to the training example.
    inputLayer[0] = 0.55;
    inputLayer[1] = 0.24;
    inputLayer[2] = -0.9;
    inputLayer[3] = 0.021;
    forwardPropagation();
    for (int k = 0; k < OUTPUT_NODES; k++) {
        printf("Output[%d]: %f\n", k, outputLayer[k]);
    }
    return 0;
}
```