#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>

// Hyperparameters
#define LEARNING_RATE 0.5
#define EPOCHS 10000

// Activation function (Sigmoid)
double sigmoid(double x) {
    return 1.0 / (1.0 + exp(-x));
}

// Derivative of the sigmoid, expressed in terms of the sigmoid's OUTPUT:
// if y = sigmoid(x), then dy/dx = y * (1 - y). Callers must pass y, not x.
double sigmoid_derivative(double y) {
    return y * (1.0 - y);
}

// Random number generator for weight initialization, range [-1, 1]
double random_weight() {
    return ((double)rand() / RAND_MAX) * 2.0 - 1.0;
}

int main(void) {
    srand(time(NULL)); // Seed the random generator

    // Training data: every 3-bit binary input and its decimal equivalent (normalized)
    double input[8][3] = {
        {0, 0, 0}, {0, 0, 1}, {0, 1, 0}, {0, 1, 1},
        {1, 0, 0}, {1, 0, 1}, {1, 1, 0}, {1, 1, 1}
    };
    double expected_output[8];
    for (int i = 0; i < 8; i++) {
        expected_output[i] = i / 7.0; // Normalize targets to [0, 1]
    }

    // Network shape: 3 inputs -> 2 hidden neurons -> 1 output
    double hidden_weights[3][2], hidden_bias[2];
    double output_weights[2], output_bias;

    // Random weight initialization
    for (int i = 0; i < 3; i++)
        for (int j = 0; j < 2; j++)
            hidden_weights[i][j] = random_weight();
    for (int j = 0; j < 2; j++)
        hidden_bias[j] = random_weight();
    for (int k = 0; k < 2; k++)
        output_weights[k] = random_weight();
    output_bias = random_weight();

    // Training loop
    for (int epoch = 0; epoch < EPOCHS; epoch++) {
        double total_error = 0;

        for (int i = 0; i < 8; i++) {
            // Forward pass: hidden layer
            double hidden_output[2];
            for (int j = 0; j < 2; j++) {
                hidden_output[j] = sigmoid(
                    input[i][0] * hidden_weights[0][j] +
                    input[i][1] * hidden_weights[1][j] +
                    input[i][2] * hidden_weights[2][j] +
                    hidden_bias[j]
                );
            }

            // Forward pass: output layer
            double output = sigmoid(
                hidden_output[0] * output_weights[0] +
                hidden_output[1] * output_weights[1] +
                output_bias
            );

            // Compute error (squared error accumulated for reporting)
            double error = expected_output[i] - output;
            total_error += error * error;

            // Backpropagation: output layer delta
            double output_delta = error * sigmoid_derivative(output);

            // Backpropagation: hidden layer deltas.
            // Note: this must read the CURRENT output weights, before they are updated below.
            double hidden_delta[2];
            for (int j = 0; j < 2; j++) {
                hidden_delta[j] = output_delta * output_weights[j]
                                * sigmoid_derivative(hidden_output[j]);
            }

            // Update weights and biases (stochastic gradient descent)
            for (int j = 0; j < 2; j++) {
                output_weights[j] += LEARNING_RATE * output_delta * hidden_output[j];
                hidden_bias[j]    += LEARNING_RATE * hidden_delta[j];
            }
            output_bias += LEARNING_RATE * output_delta;
            for (int j = 0; j < 2; j++) {
                for (int m = 0; m < 3; m++) {
                    hidden_weights[m][j] += LEARNING_RATE * hidden_delta[j] * input[i][m];
                }
            }
        }

        // Print the mean squared error every 5000 epochs
        if (epoch % 5000 == 0) {
            printf("Epoch %d, Error: %.6f\n", epoch, total_error / 8);
        }
    }

    // Testing the trained network
    printf("\nTesting the trained neural network:\n");
    for (int i = 0; i < 8; i++) {
        // Forward pass: hidden layer
        double hidden_output[2];
        for (int j = 0; j < 2; j++) {
            hidden_output[j] = sigmoid(
                input[i][0] * hidden_weights[0][j] +
                input[i][1] * hidden_weights[1][j] +
                input[i][2] * hidden_weights[2][j] +
                hidden_bias[j]
            );
        }

        // Forward pass: output layer
        double output = sigmoid(
            hidden_output[0] * output_weights[0] +
            hidden_output[1] * output_weights[1] +
            output_bias
        );

        // Denormalize (multiply by 7) to report values in the original 0..7 range
        printf("Input: (%.0f, %.0f, %.0f) -> Output: %.2f (Expected: %.0f)\n",
               input[i][0], input[i][1], input[i][2],
               output * 7, expected_output[i] * 7);
    }

    return 0;
}
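
/*
 * Build and run: a minimal sketch, assuming a POSIX-style toolchain such as
 * gcc or clang (the file name "nn.c" is hypothetical, not from the source):
 *
 *   gcc nn.c -o nn -lm
 *   ./nn
 *
 * The -lm flag links the C math library, which provides the exp() call used
 * by sigmoid(). Exact outputs vary from run to run because the weights are
 * seeded from time(NULL).
 */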