#include <stdio.h>
#include <stdlib.h>
#include <math.h>

// Hyperparameters
#define LEARNING_RATE 0.5
#define EPOCHS 10000

// Sigmoid activation function
double sigmoid(double x) {
    return 1 / (1 + exp(-x));
}

// Derivative of sigmoid, expressed in terms of the sigmoid output
// (i.e. x here is assumed to already be sigmoid(z))
double sigmoid_derivative(double x) {
    return x * (1 - x);
}

int main() {
    // Training data (XOR problem)
    double input[4][2] = { {0, 0}, {0, 1}, {1, 0}, {1, 1} };
    double expected_output[4] = {0, 1, 1, 0};

    // Initialize weights and biases with small fixed starting values
    // (hand-picked here rather than drawn at random)
    double hidden_weights[2][3] = {{0.5, -0.2, 0.3}, {-0.3, 0.8, -0.5}};
    double hidden_bias[3] = {0.1, -0.1, 0.2};
    double output_weights[3] = {0.6, -0.4, 0.7};
    double output_bias = 0.2;

    // Training loop
    for (int epoch = 0; epoch < EPOCHS; epoch++) {
        double total_error = 0;

        for (int i = 0; i < 4; i++) {
            // Forward pass - Hidden layer
            double hidden_layer[3];
            for (int j = 0; j < 3; j++) {
                hidden_layer[j] = sigmoid(input[i][0] * hidden_weights[0][j] +
                                          input[i][1] * hidden_weights[1][j] +
                                          hidden_bias[j]);
            }

            // Forward pass - Output layer
            double output = sigmoid(hidden_layer[0] * output_weights[0] +
                                    hidden_layer[1] * output_weights[1] +
                                    hidden_layer[2] * output_weights[2] +
                                    output_bias);

            // Compute error (squared error accumulated over the 4 samples)
            double error = expected_output[i] - output;
            total_error += error * error;

            // Backpropagation - Output layer
            double output_delta = error * sigmoid_derivative(output);

            // Backpropagation - Hidden layer
            double hidden_delta[3];
            for (int j = 0; j < 3; j++) {
                hidden_delta[j] = output_delta * output_weights[j] * sigmoid_derivative(hidden_layer[j]);
            }

            // Update weights and biases
            for (int j = 0; j < 3; j++) {
                output_weights[j] += LEARNING_RATE * output_delta * hidden_layer[j];
                hidden_weights[0][j] += LEARNING_RATE * hidden_delta[j] * input[i][0];
                hidden_weights[1][j] += LEARNING_RATE * hidden_delta[j] * input[i][1];
                hidden_bias[j] += LEARNING_RATE * hidden_delta[j];
            }
            output_bias += LEARNING_RATE * output_delta;
        }

        // Print error every 1000 epochs
        if (epoch % 1000 == 0) {
            printf("Epoch %d, Error: %.6f\n", epoch, total_error / 4);
        }
    }

    // Testing the trained network
    printf("\nTesting the trained neural network:\n");
    for (int i = 0; i < 4; i++) {
        double hidden_layer[3];

        // Forward pass - Hidden layer
        for (int j = 0; j < 3; j++) {
            hidden_layer[j] = sigmoid(input[i][0] * hidden_weights[0][j] +
                                      input[i][1] * hidden_weights[1][j] +
                                      hidden_bias[j]);
        }

        // Forward pass - Output layer
        double output = sigmoid(hidden_layer[0] * output_weights[0] +
                                hidden_layer[1] * output_weights[1] +
                                hidden_layer[2] * output_weights[2] +
                                output_bias);

        printf("Input: (%.0f, %.0f) -> Output: %.4f\n", input[i][0], input[i][1], output);
    }

    return 0;
}
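
/*
 * Usage sketch (the file name xor_nn.c below is illustrative, not from the
 * original source): compile with the math library linked, since sigmoid()
 * calls exp(), then run the binary.
 *
 *   gcc xor_nn.c -o xor_nn -lm
 *   ./xor_nn
 *
 * After training, the printed outputs should be close to 0 for inputs
 * (0,0) and (1,1) and close to 1 for (0,1) and (1,0).
 */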