Python scipy.optimize.curve_fit gives linear fit
I'm having trouble with the parameters of scipy's curve_fit. I originally copied the code suggested by the docs. I then changed the equation slightly and it was fine, but after increasing the range of np.linspace, the whole prediction ends up as a straight line. Any ideas?
import numpy as np
from scipy.optimize import curve_fit
import matplotlib.pyplot as plt
def f(x, a, b, c):
    # This works fine on smaller numbers
    return (a - c) * np.exp(-x / b) + c
xdata = np.linspace(60, 3060, 200)
ydata = f(xdata, 100, 400, 20)
# noise
np.random.seed(1729)
ydata = ydata + np.random.normal(size=xdata.size) * 0.2
# graph
fig, ax = plt.subplots()
plt.plot(xdata, ydata, marker="o")
pred, covar = curve_fit(f, xdata, ydata)
plt.plot(xdata, f(xdata, *pred), label="prediction")
plt.legend()
plt.show()
You probably need to start from a better guess: the default initial guess of (1.0, 1.0, 1.0) appears to lie in a divergent region. I used the initial guess p0 = (50, 200, 100) and it works:
fig, ax = plt.subplots()
plt.plot(xdata, ydata, marker="o")
pred, covar = curve_fit(f, xdata, ydata, p0=(50, 200, 100))
plt.plot(xdata, f(xdata, *pred), label="prediction")
plt.legend()
plt.show()
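If ranges are easier to state than point guesses, curve_fit also accepts a bounds argument (which switches the solver from Levenberg–Marquardt to trust region reflective). A minimal sketch along those lines; the bounds below are illustrative values eyeballed from the data, not part of the answer above:

from scipy.optimize import curve_fit

# illustrative bounds (assumption): loose ranges read off the data
lower = (0.0, 1.0, 0.0)         # lower bounds for a, b, c
upper = (200.0, 5000.0, 200.0)  # upper bounds for a, b, c
pred, covar = curve_fit(f, xdata, ydata, p0=(50, 200, 100), bounds=(lower, upper))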
Here is example code using your data and equation, with the initial parameter estimates supplied by scipy's differential_evolution genetic algorithm module. That module uses the Latin Hypercube algorithm to ensure a thorough search of parameter space, and it requires bounds within which to search. In this example, those bounds are taken from the data's maximum and minimum values. It is much easier to supply ranges for the initial parameter estimates than specific values.
import numpy
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from scipy.optimize import differential_evolution
import warnings
def func(x, a, b, c):
    return (a - c) * numpy.exp(-x / b) + c
xData = numpy.linspace(60, 3060, 200)
yData = func(xData, 100, 400, 20)
# noise
numpy.random.seed(1729)
yData = yData + numpy.random.normal(size=xData.size) * 0.2
# function for genetic algorithm to minimize (sum of squared error)
def sumOfSquaredError(parameterTuple):
    warnings.filterwarnings("ignore")  # do not print warnings raised during the genetic algorithm search
    val = func(xData, *parameterTuple)
    return numpy.sum((yData - val) ** 2.0)
def generate_Initial_Parameters():
    # data min and max used for the search bounds
    maxX = max(xData)
    minX = min(xData)
    maxY = max(yData)
    minY = min(yData)
    parameterBounds = []
    parameterBounds.append([minY, maxY])  # search bounds for a
    parameterBounds.append([minX, maxX])  # search bounds for b
    parameterBounds.append([minY, maxY])  # search bounds for c
    # seed the random number generator for repeatable results
    result = differential_evolution(sumOfSquaredError, parameterBounds, seed=3)
    return result.x
# by default, differential_evolution polishes its best result with L-BFGS-B, respecting the parameter bounds
geneticParameters = generate_Initial_Parameters()
# now call curve_fit without bounds, using the genetic algorithm result as
# the initial guess, in case the best-fit parameters lie outside those bounds
fittedParameters, pcov = curve_fit(func, xData, yData, geneticParameters)
print('Fitted parameters:', fittedParameters)
print()
modelPredictions = func(xData, *fittedParameters)
absError = modelPredictions - yData
SE = numpy.square(absError) # squared errors
MSE = numpy.mean(SE) # mean squared errors
RMSE = numpy.sqrt(MSE) # Root Mean Squared Error, RMSE
Rsquared = 1.0 - (numpy.var(absError) / numpy.var(yData))
print()
print('RMSE:', RMSE)
print('R-squared:', Rsquared)
print()
##########################################################
# graphics output section
def ModelAndScatterPlot(graphWidth, graphHeight):
    f = plt.figure(figsize=(graphWidth / 100.0, graphHeight / 100.0), dpi=100)
    axes = f.add_subplot(111)
    # first the raw data as a scatter plot
    axes.plot(xData, yData, 'D')
    # create data for the fitted equation plot
    xModel = numpy.linspace(min(xData), max(xData))
    yModel = func(xModel, *fittedParameters)
    # now the model as a line plot
    axes.plot(xModel, yModel)
    axes.set_xlabel('X Data')  # X axis data label
    axes.set_ylabel('Y Data')  # Y axis data label
    plt.show()
    plt.close('all')  # clean up after using pyplot
graphWidth = 800
graphHeight = 600
ModelAndScatterPlot(graphWidth, graphHeight)
This is due to a limitation of the Levenberg–Marquardt algorithm, which curve_fit uses by default. The right way to use it is to supply a decent initial guess for the parameters before optimizing. In my experience, this is especially important when fitting exponential functions like your example. With an iterative algorithm like LM, the quality of the starting point determines where the result converges. The more parameters you have, the more likely your final result converges to an entirely unwanted curve. Overall, the solution is to somehow find a good initial guess, as the other answers do.
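For this particular model, one way to find such a guess directly from the data is sketched below. The heuristics (tail mean for c, first point for a, the ~63% decay point for b) are illustrative assumptions, not something from the answers above; they assume the data decays toward a plateau:

import numpy as np
from scipy.optimize import curve_fit

def f(x, a, b, c):
    return (a - c) * np.exp(-x / b) + c

# same synthetic data as the question
xdata = np.linspace(60, 3060, 200)
np.random.seed(1729)
ydata = f(xdata, 100, 400, 20) + np.random.normal(size=xdata.size) * 0.2

# heuristic starting values (illustrative assumptions):
c0 = ydata[-20:].mean()               # tail of the data approximates the plateau c
a0 = ydata[0]                         # first point approximates the starting level a
target = c0 + (a0 - c0) * np.exp(-1)  # level reached after roughly one time constant
b0 = xdata[np.abs(ydata - target).argmin()]  # x where y is nearest that level
pred, covar = curve_fit(f, xdata, ydata, p0=(a0, b0, c0))
print(pred)  # converges close to the generating parameters (100, 400, 20)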