这里基本完全参考网络资源完成,有疑问欢迎留言!

LinearRegression.h

#pragma once
#ifndef ML_LINEARREGRESSION_H  // fixed: was ML_LINEAEEEGRESSION_H, mismatching the #define below
#define ML_LINEARREGRESSION_H

/*
 * Univariate linear regression (y = theta[0] + theta[1] * x) trained
 * with batch gradient descent.
 *
 * NOTE(review): the instance stores non-owning raw pointers to the
 * caller's sample arrays; the caller must keep them alive for the
 * lifetime of this object.
 */
class LinearRegression {
public:
    /* Feature values (non-owning, caller-supplied). */
    double *x;
    /* Target values (non-owning, caller-supplied). */
    double *y;
    /* Number of samples. */
    int m;
    /* Coefficients after train(): theta[0] = intercept, theta[1] = slope. */
    double *theta;
    /* Construct from m samples; the arrays are borrowed, not copied. */
    LinearRegression(double x[], double y[], int m);
    /* Train with learning rate alpha for the given number of iterations. */
    void train(double alpha, int iterations);
    /* Predict y' for a single feature value (requires a prior train()). */
    double predict(double x);
private:
    /* Mean-squared-error cost J(theta) over the m samples. */
    static double compute_cost(double x[], double y[], double theta[], int m);
    /* Hypothesis for one sample: theta[0] + theta[1] * x. */
    static double h(double x, double theta[]);
    /* Returns a new[]-allocated array of m predictions; caller owns it. */
    static double *calculate_predictions(double x[], double theta[], int m);
    /* Batch gradient descent; writes the per-iteration cost into j. */
    static double *gradient_descent(double x[], double y[], double alpha, int iter, double *j, int m);
};
#endif // !ML_LINEARREGRESSION_H

LinearRegression.cpp

#include "LinearRegression.h"  // fixed case: header is LinearRegression.h, not linearRegression.h

#include <iostream>

#include "Utils.h"
using namespace std; /*初始化*/
/*
 * Store (borrow) the caller's sample arrays; nothing is copied.
 * x: feature values, y: targets, m: number of samples.
 */
LinearRegression::LinearRegression(double x[], double y[], int m)
{
    this->x = x;
    this->y = y;
    this->m = m;
    // Fixed: theta was left uninitialized, making predict() before
    // train() undefined behavior; nullptr makes the misuse detectable.
    this->theta = nullptr;
}
/*
 * Run batch gradient descent and store the fitted coefficients.
 * alpha:      learning rate
 * iterations: number of gradient-descent steps
 * Side effects: sets this->theta (heap-allocated, 2 elements) and
 * prints the cost history J plus the final coefficients to stdout.
 */
void LinearRegression::train(double alpha, int iterations)
{
    double *J = new double[iterations];
    this->theta = gradient_descent(x, y, alpha, iterations, J, m);
    cout << "J=";
    for (int i = 0; i < iterations; ++i)  // fixed: loop start was stripped to "i = ;"
    {
        cout << J[i] << " " << endl;
    }
    cout << "\n" << "Theta: " << theta[0] << " " << theta[1] << endl;
    delete[] J;  // fixed: J was leaked in the original
}
/*
 * Predict y' for one feature value with the trained coefficients.
 * Precondition: train() has been called (theta must be set).
 * Also echoes the prediction to stdout.
 */
double LinearRegression::predict(double x)
{
    // Fixed: the original evaluated h(x, theta) twice.
    const double prediction = h(x, theta);
    cout << "y':" << prediction << endl;
    return prediction;
}
/*
 * Mean-squared-error cost: J = (1 / (2m)) * sum((h(x_i) - y_i)^2).
 * Restored the scrape-stripped literals: exponent 2 and divisor 2*m.
 */
double LinearRegression::compute_cost(double x[], double y[], double theta[], int m)
{
    double *predictions = calculate_predictions(x, theta, m);
    double *diff = Utils::array_diff(predictions, y, m);
    double *sq_errors = Utils::array_pow(diff, m, 2);
    double cost = (1.0 / (2 * m)) * Utils::array_sum(sq_errors, m);
    delete[] predictions;  // fixed: known new[] allocation was leaked
    // NOTE(review): diff and sq_errors likely leak too if the
    // Utils::array_* helpers return new[]-allocated arrays — confirm
    // their ownership contract and free them here as well.
    return cost;
}
/*
 * Hypothesis for a single sample: h(x) = theta[0] + theta[1] * x.
 * Restored the scrape-stripped array indices.
 */
double LinearRegression::h(double x, double theta[])
{
    return theta[0] + theta[1] * x;
}
/*
 * Compute the hypothesis for every one of the m samples.
 * Returns a new[]-allocated array of length m; the caller owns it
 * and must delete[] it.
 */
double *LinearRegression::calculate_predictions(double x[], double theta[], int m)
{
    double *predictions = new double[m];
    for (int i = 0; i < m; i++)  // fixed: loop start was stripped to "i = ;"
    {
        predictions[i] = h(x[i], theta);
    }
    return predictions;
}
/*
 * Batch gradient descent for the two coefficients.
 * alpha: learning rate; iter: number of steps; J: out-array (length
 * iter) receiving the cost after each step; m: sample count.
 * Returns a new[]-allocated theta[2]; the caller owns it.
 * Restored the scrape-stripped literals (array size 2, zero inits,
 * loop start, update indices).
 */
double *LinearRegression::gradient_descent(double x[], double y[], double alpha, int iter, double *J, int m)
{
    double *theta = new double[2];
    theta[0] = 0;
    theta[1] = 0;
    for (int i = 0; i < iter; i++)
    {
        double *predictions = calculate_predictions(x, theta, m);
        double *diff = Utils::array_diff(predictions, y, m);
        /* Gradient wrt theta[0] uses the raw residuals; wrt theta[1]
           the residuals weighted by the feature values. */
        double *error_x1 = diff;
        double *error_x2 = Utils::array_multiplication(diff, x, m);
        /* A convergence threshold on J (or on the gradient magnitude)
           could be added here instead of a fixed iteration count. */
        theta[0] = theta[0] - alpha * (1.0 / m) * Utils::array_sum(error_x1, m);
        theta[1] = theta[1] - alpha * (1.0 / m) * Utils::array_sum(error_x2, m);
        J[i] = compute_cost(x, y, theta, m);
        delete[] predictions;  // fixed: leaked every iteration in the original
        // NOTE(review): diff/error_x2 likely leak too if the Utils
        // helpers return new[]-allocated arrays — confirm and free.
    }
    return theta;
}

Test.cpp

#include <iostream>
#include <cstdlib>   // system()
#include "LinearRegression.h"  // fixed case to match the header filename
using namespace std;

/*
 * Demo driver for LinearRegression.
 * NOTE(review): the original sample numbers were lost in the source
 * scrape ("{,,,,}"); the values below (y = 2x, 5 samples, 1000
 * iterations, predict at x = 6) are plausible placeholders — restore
 * the real figures if known.
 */
int main()
{
    double x[] = { 1, 2, 3, 4, 5 };
    double y[] = { 2, 4, 6, 8, 10 };
    LinearRegression test(x, y, 5);
    test.train(0.1, 1000);
    test.predict(6);
    system("pause");  // Windows-only pause; harmless no-op intent elsewhere
    return 0;  // fixed: return value was stripped
}

最新文章

  1. 初识Angular
  2. MPMoviePlayerController属性,方法,通知整理
  3. ooj1057: M的整数倍DP
  4. 提高D3js力导向图加载速度(转)
  5. SecureCrt自动化
  6. Elasticsearch学习笔记
  7. IOS开发之——颜色设置
  8. UPDATE---修改表中数据
  9. mkisofs出错解决办法
  10. Windows 7 Wifi热点
  11. Oracle数据库之创建表结构
  12. 回溯算法-C#语言解决八皇后问题的写法与优化
  13. 10个SQL注入工具(转载)
  14. UOJ #311「UNR #2」积劳成疾
  15. Software Testing 1 —— 有关编程错误的经历
  16. sony Z5P 刷rec、root的方法
  17. hdu 1757 和1005差不多 (矩阵快速幂)
  18. 惊世骇俗的sql语句之连表查询
  19. 给定两个数组,这两个数组是排序好的,让你求这两个数组合到一起之后第K大的数。
  20. imei和imsi

热门文章

  1. 002-序列化转换JSON&XML概述
  2. Oracle-资源配置PROFILE
  3. Windows7下MongoDB的下载、安装与配置详解
  4. Vagrant 手册之 Provisioning - Shell 配置程序
  5. MySQL 中的 information_schema 数据库
  6. 【ABAP系列】SAP 后台JOB如何DEBUG
  7. 【python】 判断纯ascii串
  8. 5期-Metasploitable3专题课程
  9. get_date.sh
  10. 网页导出excel