main.cpp
#include "Eigen/Dense"
#include <iostream>
int main(int argc, char* argv[]){
    // Step 1: Define the data
    Eigen::VectorXd y(5); // Target vector
    y << 2, 2.8, 3.6, 4.5, 5.1;

    Eigen::MatrixXd X(5, 2); // Feature matrix with a column for the intercept (ones)
    X << 1, 1,
         1, 2,
         1, 3,
         1, 4,
         1, 5;

    // Initialize parameters (intercept and slope)
    Eigen::VectorXd beta(2);
    beta << 0, 0;

    // Hyperparameters
    double learning_rate = 0.01;
    int num_iterations = 100;

    // Gradient descent loop
    for (int iter = 0; iter < num_iterations; ++iter) {
        // Step 2: Compute predictions
        Eigen::VectorXd y_pred = X * beta;

        // Step 3: Calculate the loss (Mean Squared Error)
        Eigen::VectorXd error = y_pred - y;
        double loss = (error.array().square().sum()) / y.size();
        // Output the loss every 10 iterations
        if (iter % 10 == 0) {
            std::cout << "Iteration " << iter << ", Loss: " << loss << std::endl;
        }
        // Step 4: Calculate gradients: d(MSE)/d(beta) = (2/n) * X^T * (X*beta - y)
        Eigen::VectorXd gradients = (2.0 / y.size()) * X.transpose() * error;

        // Step 5: Update parameters
        beta -= learning_rate * gradients;
    }

    // Output the final parameters
    std::cout << "Final Intercept (beta_0): " << beta(0) << std::endl;
    std::cout << "Final Slope (beta_1): " << beta(1) << std::endl;
    return 0;
}
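
Not part of the original main.cpp: a short build note and an optional closed-form cross-check.

Assuming the Eigen headers are unpacked under ./eigen (adjust the -I path to your installation), the program builds and runs with:

    g++ -std=c++11 -I ./eigen main.cpp -o linreg
    ./linreg

For reference, ordinary least squares also has a closed-form solution, beta = (X^T X)^{-1} X^T y. The sketch below (a separate illustrative file; the name check.cpp and the variable beta_exact are not from the repository) fits the same data with Eigen's QR solver, so the gradient-descent estimates can be compared against the exact fit:

    // check.cpp -- closed-form least-squares fit for the same data (illustrative sketch)
    #include "Eigen/Dense"
    #include <iostream>

    int main() {
        Eigen::VectorXd y(5);
        y << 2, 2.8, 3.6, 4.5, 5.1;

        Eigen::MatrixXd X(5, 2);
        X << 1, 1,
             1, 2,
             1, 3,
             1, 4,
             1, 5;

        // Solve min ||X*beta - y||^2 directly via a column-pivoting QR decomposition.
        Eigen::VectorXd beta_exact = X.colPivHouseholderQr().solve(y);

        std::cout << "Exact intercept (beta_0): " << beta_exact(0) << std::endl; // 1.23
        std::cout << "Exact slope (beta_1):     " << beta_exact(1) << std::endl; // 0.79
        return 0;
    }

With learning_rate = 0.01 and only 100 iterations, the gradient-descent estimates will typically still be noticeably off from these exact values; increasing num_iterations lets them converge.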