Results From Curve_Fit Are Off
I am trying to reproduce some results from a paper using non-linear parameter estimation, but when I use curve_fit all I get back is an array of ones, nothing close to the values I should be getting.
Here is a minimal working example showing what I get versus the actual results:
import pandas as pd
import numpy as np
from scipy.optimize import curve_fit

xdata = np.array([0.00, 27.01, 84.15, 134.66, 178.74, 217.00, 250.20, 279.06, 304.24,
                  326.29, 346.71, 362.87, 378.13, 391.75, 403.96, 414.96])
ydata = np.array([0.00, 440.00, 933.00, 1154.00, 1226.00, 1222.00, 1185.00,
                  1134.00, 1081.00, 1031.00, 984.00, 942.00, 904.00, 870.00, 840.00, 814.00])

# Non-Linear Estimation Function
def func(V, A, d):
    return A * V * np.exp(-1 * d * V)

popt, pcov = curve_fit(func, xdata, ydata)

popt
array([1., 1.])
The actual results I should be getting are:
param = estimate (standard err)
A = 17.6 (0.132)
d = 5.27 x 10^-3 (2.61 x 10^-5)
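As a quick sanity check (a minimal sketch, reusing func, xdata and ydata from the snippet above), evaluating the model at the reported estimates shows that they do track the measured values closely:

# reported estimates from the paper
A_paper, d_paper = 17.6, 5.27e-3
pred = func(xdata, A_paper, d_paper)
print(np.sqrt(np.mean((pred - ydata) ** 2)))  # RMSE should be small relative to the y values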
If none are supplied, scipy's curve_fit() routine uses all 1.0 values for the initial parameter estimates. If curve_fit() cannot make any improvement on those initial estimates, it simply returns them, which is why you got "fitted" parameter values of all 1.0. (Here, with A = d = 1, exp(-d*V) is effectively zero over your V range, so the model is numerically insensitive to the parameters at the starting point and the solver cannot improve on it.)

A minimal sketch of supplying an explicit initial guess by hand is shown just below; after that is a graphical Python fitter with your data and equation that uses scipy's differential_evolution genetic algorithm module to supply the initial parameter estimates for the non-linear fitter. That scipy module uses the Latin Hypercube algorithm to ensure a thorough search of parameter space, which requires bounds within which to search. In this example, those bounds are taken from the data maximum and minimum values. Note that it is much easier to supply ranges for the parameters than specific values.
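First the simpler route, reusing func, xdata and ydata exactly as in the question; the starting values below are assumptions chosen only to be roughly the right order of magnitude:

# rough hand-picked starting guesses (assumed values, not from the paper)
p0 = [10.0, 0.01]  # [A, d]
popt, pcov = curve_fit(func, xdata, ydata, p0=p0)
print(popt)  # should land near A ~ 17.6, d ~ 5.27e-3

And here is the graphical fitter itself: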
import numpy, scipy, matplotlib
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from scipy.optimize import differential_evolution
import warnings

x = [0.00, 27.01, 84.15, 134.66, 178.74, 217.00, 250.20, 279.06, 304.24,
     326.29, 346.71, 362.87, 378.13, 391.75, 403.96, 414.96]
y = [0.00, 440.00, 933.00, 1154.00, 1226.00, 1222.00, 1185.00,
     1134.00, 1081.00, 1031.00, 984.00, 942.00, 904.00, 870.00, 840.00, 814.00]

xData = numpy.array(x, dtype=float)
yData = numpy.array(y, dtype=float)

# Non-Linear Estimation Function
def func(V, A, d):
    return A * V * numpy.exp(-1.0 * d * V)

# function for the genetic algorithm to minimize (sum of squared error)
def sumOfSquaredError(parameterTuple):
    warnings.filterwarnings("ignore")  # do not print warnings by the genetic algorithm
    val = func(xData, *parameterTuple)
    return numpy.sum((yData - val) ** 2.0)

def generate_Initial_Parameters():
    # data min and max used for the search bounds
    maxX = max(xData)
    minX = min(xData)
    #maxY = max(yData)
    #minY = min(yData)

    parameterBounds = []
    parameterBounds.append([minX, maxX / 10.0])  # search bounds for A
    parameterBounds.append([minX, maxX / 10.0])  # search bounds for d

    # "seed" the numpy random number generator for repeatable results
    result = differential_evolution(sumOfSquaredError, parameterBounds, seed=3)
    return result.x

# by default, differential_evolution polishes its best candidate with a bounded
# local minimizer, so these are already reasonable parameter estimates
geneticParameters = generate_Initial_Parameters()

# now call curve_fit without passing bounds from the genetic algorithm,
# just in case the best-fit parameters are outside those bounds
fittedParameters, pcov = curve_fit(func, xData, yData, geneticParameters)
print('Fitted parameters:', fittedParameters)
print()

modelPredictions = func(xData, *fittedParameters)

absError = modelPredictions - yData
SE = numpy.square(absError)  # squared errors
MSE = numpy.mean(SE)         # mean squared errors
RMSE = numpy.sqrt(MSE)       # Root Mean Squared Error, RMSE
Rsquared = 1.0 - (numpy.var(absError) / numpy.var(yData))

print()
print('RMSE:', RMSE)
print('R-squared:', Rsquared)
print()

##########################################################
# graphics output section
def ModelAndScatterPlot(graphWidth, graphHeight):
    f = plt.figure(figsize=(graphWidth / 100.0, graphHeight / 100.0), dpi=100)
    axes = f.add_subplot(111)

    # first the raw data as a scatter plot
    axes.plot(xData, yData, 'D')

    # create data for the fitted equation plot
    xModel = numpy.linspace(min(xData), max(xData))
    yModel = func(xModel, *fittedParameters)

    # now the model as a line plot
    axes.plot(xModel, yModel)

    axes.set_xlabel('X Data')  # X axis data label
    axes.set_ylabel('Y Data')  # Y axis data label

    plt.show()
    plt.close('all')  # clean up after using pyplot

graphWidth = 800
graphHeight = 600
ModelAndScatterPlot(graphWidth, graphHeight)
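The listing above prints RMSE and R-squared but not the parameter standard errors that the paper quotes. A short sketch of the usual way to estimate them, assuming pcov and fittedParameters from the curve_fit call above, is the square root of the diagonal of the covariance matrix:

perr = numpy.sqrt(numpy.diag(pcov))  # one-standard-deviation errors on the parameters
for name, value, err in zip(['A', 'd'], fittedParameters, perr):
    print(name, '=', value, '+/-', err)

These should be comparable with the 0.132 and 2.61 x 10^-5 quoted in the question.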