We again use the pizza diameter vs. price data.

import matplotlib
# Configure a font that can render CJK characters and keep the minus sign displayable
matplotlib.rcParams['font.sans-serif'] = ['SimHei']
matplotlib.rcParams['axes.unicode_minus'] = False
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures

# Training and test data: diameter (inches) -> price
X_train = [[6], [8], [10], [14], [18]]
y_train = [[7], [9], [13], [17.5], [18]]
X_test = [[6], [8], [11], [16]]
y_test = [[8], [12], [15], [18]]

# Fit a simple linear regression and plot its prediction over 0..26
LR = LinearRegression()
LR.fit(X_train, y_train)
xx = np.linspace(0, 26, 100)
yy = LR.predict(xx.reshape(xx.shape[0], 1))
plt.plot(xx, yy)
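To see the straight line the simple model has learned, its parameters are available through the standard LinearRegression attributes intercept_ and coef_ (a small sketch added here, not part of the original post):

# Inspect the fitted line: price = intercept + slope * diameter
print("intercept:", LR.intercept_)   # array of shape (1,) because y_train is 2-D
print("slope:", LR.coef_)            # array of shape (1, 1): price increase per unit of diameter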

Quadratic (degree-2) polynomial regression

# In[1] Quadratic (degree-2) polynomial regression
# The PolynomialFeatures transformer adds polynomial terms to a feature representation
quadratic_featurizer = PolynomialFeatures(degree=2)
X_train_quadratic = quadratic_featurizer.fit_transform(X_train)
X_test_quadratic = quadratic_featurizer.transform(X_test)

regressor_quadratic = LinearRegression()
regressor_quadratic.fit(X_train_quadratic, y_train)
xx_quadratic = quadratic_featurizer.transform(xx.reshape(xx.shape[0], 1))
yy_quadratic = regressor_quadratic.predict(xx_quadratic)
plt.plot(xx, yy_quadratic, c='r', linestyle='--')

# In[2] Plot settings and output
plt.title("Pizza price vs. diameter")
plt.xlabel("Diameter")
plt.ylabel("Price")
plt.axis([0, 25, 0, 25])
plt.grid(True)
plt.scatter(X_train, y_train)

print("X_train\n", X_train)
print("X_train_quadratic\n", X_train_quadratic)
print("X_test\n", X_test)
print("X_test_quadratic\n", X_test_quadratic)
print("Simple linear regression R^2", LR.score(X_test, y_test))
print("Quadratic polynomial regression R^2", regressor_quadratic.score(X_test_quadratic, y_test))

X_train
 [[6], [8], [10], [14], [18]]
X_train_quadratic
 [[  1.   6.  36.]
  [  1.   8.  64.]
  [  1.  10. 100.]
  [  1.  14. 196.]
  [  1.  18. 324.]]
X_test
 [[6], [8], [11], [16]]
X_test_quadratic
 [[  1.   6.  36.]
  [  1.   8.  64.]
  [  1.  11. 121.]
  [  1.  16. 256.]]
Simple linear regression R^2 0.809726797707665
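The score method of a regressor returns the coefficient of determination R², i.e. 1 − SS_res / SS_tot on the given data. As a quick check, the value above can be reproduced by hand (a small sketch added here, not from the original post; r2_score is a standard sklearn helper):

import numpy as np
from sklearn.metrics import r2_score

y_true = np.array(y_test).ravel()
y_pred = LR.predict(X_test).ravel()
ss_res = np.sum((y_true - y_pred) ** 2)          # residual sum of squares
ss_tot = np.sum((y_true - y_true.mean()) ** 2)   # total sum of squares
print(1 - ss_res / ss_tot)                        # same value as LR.score(X_test, y_test)
print(r2_score(y_true, y_pred))                   # sklearn's helper gives the same number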

Cubic (degree-3) polynomial regression

# In[3] Try cubic (degree-3) polynomial regression
cubic_featurizer = PolynomialFeatures(degree=3)
X_train_cubic = cubic_featurizer.fit_transform(X_train)
X_test_cubic = cubic_featurizer.transform(X_test)

regressor_cubic = LinearRegression()
regressor_cubic.fit(X_train_cubic, y_train)
xx_cubic = cubic_featurizer.transform(xx.reshape(xx.shape[0], 1))
yy_cubic = regressor_cubic.predict(xx_cubic)
plt.plot(xx, yy_cubic, c='g', linestyle='--')
plt.show()

print("X_train\n", X_train)
print("X_train_cubic\n", X_train_cubic)
print("X_test\n", X_test)
print("X_test_cubic\n", X_test_cubic)
print("Cubic polynomial regression R^2", regressor_cubic.score(X_test_cubic, y_test))

X_train
 [[6], [8], [10], [14], [18]]
X_train_cubic
 [[1.000e+00 6.000e+00 3.600e+01 2.160e+02]
  [1.000e+00 8.000e+00 6.400e+01 5.120e+02]
  [1.000e+00 1.000e+01 1.000e+02 1.000e+03]
  [1.000e+00 1.400e+01 1.960e+02 2.744e+03]
  [1.000e+00 1.800e+01 3.240e+02 5.832e+03]]
X_test
 [[6], [8], [11], [16]]
X_test_cubic
 [[1.000e+00 6.000e+00 3.600e+01 2.160e+02]
  [1.000e+00 8.000e+00 6.400e+01 5.120e+02]
  [1.000e+00 1.100e+01 1.210e+02 1.331e+03]
  [1.000e+00 1.600e+01 2.560e+02 4.096e+03]]
Cubic polynomial regression R^2 0.8356924156036954

Ninth-degree polynomial regression

# In[4] Try ninth-degree polynomial regression
nine_featurizer = PolynomialFeatures(degree=9)
X_train_nine = nine_featurizer.fit_transform(X_train)
X_test_nine = nine_featurizer.transform(X_test)

regressor_nine = LinearRegression()
regressor_nine.fit(X_train_nine, y_train)
xx_nine = nine_featurizer.transform(xx.reshape(xx.shape[0], 1))
yy_nine = regressor_nine.predict(xx_nine)
plt.plot(xx, yy_nine, c='k', linestyle='--')
plt.show()

print("X_train\n", X_train)
print("X_train_nine\n", X_train_nine)
print("X_test\n", X_test)
print("X_test_nine\n", X_test_nine)
print("Ninth-degree polynomial regression R^2", regressor_nine.score(X_test_nine, y_test))

X_train
 [[6], [8], [10], [14], [18]]
X_train_nine
 [[1.00000000e+00 6.00000000e+00 3.60000000e+01 2.16000000e+02 1.29600000e+03 7.77600000e+03 4.66560000e+04 2.79936000e+05 1.67961600e+06 1.00776960e+07]
  [1.00000000e+00 8.00000000e+00 6.40000000e+01 5.12000000e+02 4.09600000e+03 3.27680000e+04 2.62144000e+05 2.09715200e+06 1.67772160e+07 1.34217728e+08]
  [1.00000000e+00 1.00000000e+01 1.00000000e+02 1.00000000e+03 1.00000000e+04 1.00000000e+05 1.00000000e+06 1.00000000e+07 1.00000000e+08 1.00000000e+09]
  [1.00000000e+00 1.40000000e+01 1.96000000e+02 2.74400000e+03 3.84160000e+04 5.37824000e+05 7.52953600e+06 1.05413504e+08 1.47578906e+09 2.06610468e+10]
  [1.00000000e+00 1.80000000e+01 3.24000000e+02 5.83200000e+03 1.04976000e+05 1.88956800e+06 3.40122240e+07 6.12220032e+08 1.10199606e+10 1.98359290e+11]]
X_test
 [[6], [8], [11], [16]]
X_test_nine
 [[1.00000000e+00 6.00000000e+00 3.60000000e+01 2.16000000e+02 1.29600000e+03 7.77600000e+03 4.66560000e+04 2.79936000e+05 1.67961600e+06 1.00776960e+07]
  [1.00000000e+00 8.00000000e+00 6.40000000e+01 5.12000000e+02 4.09600000e+03 3.27680000e+04 2.62144000e+05 2.09715200e+06 1.67772160e+07 1.34217728e+08]
  [1.00000000e+00 1.10000000e+01 1.21000000e+02 1.33100000e+03 1.46410000e+04 1.61051000e+05 1.77156100e+06 1.94871710e+07 2.14358881e+08 2.35794769e+09]
  [1.00000000e+00 1.60000000e+01 2.56000000e+02 4.09600000e+03 6.55360000e+04 1.04857600e+06 1.67772160e+07 2.68435456e+08 4.29496730e+09 6.87194767e+10]]
Ninth-degree polynomial regression R^2 -0.09435666704291412
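A negative R² means the ninth-degree model predicts the test prices worse than a constant model that always outputs the mean test price: the classic symptom of overfitting, since the curve passes almost exactly through the five training points but swings wildly between them. For a compact comparison across degrees, here is a small sketch (added here, not part of the original post) using a scikit-learn pipeline built with make_pipeline; all components are standard sklearn:

from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression

X_train = [[6], [8], [10], [14], [18]]
y_train = [[7], [9], [13], [17.5], [18]]
X_test = [[6], [8], [11], [16]]
y_test = [[8], [12], [15], [18]]

# Fit one polynomial regression per degree and report train/test R^2
for degree in (1, 2, 3, 9):
    model = make_pipeline(PolynomialFeatures(degree=degree), LinearRegression())
    model.fit(X_train, y_train)
    print(degree,
          round(model.score(X_train, y_train), 4),   # training R^2 keeps rising with degree
          round(model.score(X_test, y_test), 4))     # test R^2 peaks at a low degree, then collapses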

Full code

 

# -*- coding: utf-8 -*-
import matplotlib
# Configure a font that can render CJK characters and keep the minus sign displayable
matplotlib.rcParams['font.sans-serif'] = ['SimHei']
matplotlib.rcParams['axes.unicode_minus'] = False
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures

# Training and test data: diameter (inches) -> price
X_train = [[6], [8], [10], [14], [18]]
y_train = [[7], [9], [13], [17.5], [18]]
X_test = [[6], [8], [11], [16]]
y_test = [[8], [12], [15], [18]]

# Simple linear regression
LR = LinearRegression()
LR.fit(X_train, y_train)
xx = np.linspace(0, 26, 100)
yy = LR.predict(xx.reshape(xx.shape[0], 1))
plt.plot(xx, yy)

# In[1] Quadratic (degree-2) polynomial regression
# The PolynomialFeatures transformer adds polynomial terms to a feature representation
quadratic_featurizer = PolynomialFeatures(degree=2)
X_train_quadratic = quadratic_featurizer.fit_transform(X_train)
X_test_quadratic = quadratic_featurizer.transform(X_test)

regressor_quadratic = LinearRegression()
regressor_quadratic.fit(X_train_quadratic, y_train)
xx_quadratic = quadratic_featurizer.transform(xx.reshape(xx.shape[0], 1))
yy_quadratic = regressor_quadratic.predict(xx_quadratic)
plt.plot(xx, yy_quadratic, c='r', linestyle='--')

# In[2] Plot settings and output
plt.title("Pizza price vs. diameter")
plt.xlabel("Diameter")
plt.ylabel("Price")
plt.axis([0, 25, 0, 25])
plt.grid(True)
plt.scatter(X_train, y_train)

print("X_train\n", X_train)
print("X_train_quadratic\n", X_train_quadratic)
print("X_test\n", X_test)
print("X_test_quadratic\n", X_test_quadratic)
print("Simple linear regression R^2", LR.score(X_test, y_test))
print("Quadratic polynomial regression R^2", regressor_quadratic.score(X_test_quadratic, y_test))

# In[3] Try cubic (degree-3) polynomial regression
cubic_featurizer = PolynomialFeatures(degree=3)
X_train_cubic = cubic_featurizer.fit_transform(X_train)
X_test_cubic = cubic_featurizer.transform(X_test)

regressor_cubic = LinearRegression()
regressor_cubic.fit(X_train_cubic, y_train)
xx_cubic = cubic_featurizer.transform(xx.reshape(xx.shape[0], 1))
yy_cubic = regressor_cubic.predict(xx_cubic)
plt.plot(xx, yy_cubic, c='g', linestyle='--')
plt.show()

print("X_train\n", X_train)
print("X_train_cubic\n", X_train_cubic)
print("X_test\n", X_test)
print("X_test_cubic\n", X_test_cubic)
print("Cubic polynomial regression R^2", regressor_cubic.score(X_test_cubic, y_test))

# In[4] Try ninth-degree polynomial regression
nine_featurizer = PolynomialFeatures(degree=9)
X_train_nine = nine_featurizer.fit_transform(X_train)
X_test_nine = nine_featurizer.transform(X_test)

regressor_nine = LinearRegression()
regressor_nine.fit(X_train_nine, y_train)
xx_nine = nine_featurizer.transform(xx.reshape(xx.shape[0], 1))
yy_nine = regressor_nine.predict(xx_nine)
plt.plot(xx, yy_nine, c='k', linestyle='--')
plt.show()

print("X_train\n", X_train)
print("X_train_nine\n", X_train_nine)
print("X_test\n", X_test)
print("X_test_nine\n", X_test_nine)
print("Ninth-degree polynomial regression R^2", regressor_nine.score(X_test_nine, y_test))
