Not able to fit a function with scipy.optimize.curve_fit()

I would like to fit the following function:

def invlaplace_stehfest2(time,EL,tau):
    upsilon=0.25
    pmax=6.9
    E0=0.0154
    M=8
    results=[]
    for t in time:
        func=0
        for k in range(1,2*M+1):
            SUM=0
            for j in range(int(math.floor((k+1)/2)),min(k,M)+1):
                dummy=j**(M+1)*scipy.special.binom(M,j)*scipy.special.binom(2*j,j)*scipy.special.binom(j,k-j)/math.factorial(M)
                SUM+=dummy
            s=k*math.log(2)/t
            func+=(-1)**(M+k)*SUM*pmax*EL/(mp.exp(tau*s)*mp.expint(1,tau*s)*E0+EL)/s

        func=func*math.log(2)/t
        results.append(func)
    return  [float(i) for i in results]

To do so, I use the following data:

data_time=np.array([69.0,99.0,139.0,179.0,219.0,259.0,295.5,299.03])
data_relax=np.array([6.2536,6.1652,6.0844,6.0253,5.9782,5.9404,5.9104,5.9066])

The guess is as follows:

guess=np.array([0.1,0.05])

and scipy.optimize.curve_fit() as follows:

  Parameter,Covariance=scipy.optimize.curve_fit(invlaplace_stehfest2,data_time,data_relax,guess)

For reasons I do not understand, I cannot fit the data correctly. I get the following graph.

Bad fitting

My function is without doubt correct, because when I use the correct guess:

guess=np.array([0.33226685047281592707364253044085038793404361200072,8.6682623502960394383501102909774397295654841654769])

I am able to fit my data correctly.

Expected fitting

Any hints as to why I cannot get a correct fit? Should I use another method?

Here is the whole program:

##############################################
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import matplotlib.pylab as mplab
import math
from math import *
import numpy as np
import scipy
import scipy.special
from scipy.optimize import curve_fit
import mpmath as mp

############################################################################################
def invlaplace_stehfest2(time,EL,tau):
    upsilon=0.25
    pmax=6.9
    E0=0.0154
    M=8
    results=[]
    for t in time:
        func=0
        for k in range(1,2*M+1):
            SUM=0
            for j in range(int(math.floor((k+1)/2)),min(k,M)+1):
                dummy=j**(M+1)*scipy.special.binom(M,j)*scipy.special.binom(2*j,j)*scipy.special.binom(j,k-j)/math.factorial(M)
                SUM+=dummy
            s=k*math.log(2)/t
            func+=(-1)**(M+k)*SUM*pmax*EL/(mp.exp(tau*s)*mp.expint(1,tau*s)*E0+EL)/s

        func=func*math.log(2)/t
        results.append(func)
    return  [float(i) for i in results]


############################################################################################    

###Constant###

####Value####
data_time=np.array([69.0,99.0,139.0,179.0,219.0,259.0,295.5,299.03])
data_relax=np.array([6.2536,6.1652,6.0844,6.0253,5.9782,5.9404,5.9104,5.9066])


###Fitting###
guess=np.array([0.33226685047281592707364253044085038793404361200072,8.6682623502960394383501102909774397295654841654769])
#guess=np.array([0.1,0.05])
Parameter,Covariance=scipy.optimize.curve_fit(invlaplace_stehfest2,data_time,data_relax,guess)
print(Parameter)
residu=sum(data_relax-invlaplace_stehfest2(data_time,Parameter[0],Parameter[1]))


Graph_Curves=plt.figure()
ax = Graph_Curves.add_subplot(111)
ax.plot(data_time,invlaplace_stehfest2(data_time,Parameter[0],Parameter[1]),"-")
ax.plot(data_time,data_relax,"o")
plt.show()

Non-linear fitters, such as the default Levenberg-Marquardt solver used in scipy.optimize.curve_fit(), can, like most iterative solvers, stop at a local minimum in error space. If the error space is "bumpy", the initial parameter estimates become very important, as they are in this case.
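To make the "bumpy error space" concrete, here is a minimal sketch (not part of the original answer) that evaluates the sum of squared errors on a grid of (EL, tau) values, reusing the invlaplace_stehfest2 function and the data arrays defined above; the grid ranges are illustrative assumptions, not values from the original post.

# Sketch only: assumes invlaplace_stehfest2, data_time and data_relax
# are already defined as in the script above. The EL and tau ranges are
# arbitrary illustrative choices.
EL_grid = np.linspace(0.05, 1.0, 40)
tau_grid = np.linspace(0.5, 15.0, 40)
SSE = np.zeros((len(tau_grid), len(EL_grid)))

for i, tau in enumerate(tau_grid):
    for j, EL in enumerate(EL_grid):
        model = np.array(invlaplace_stehfest2(data_time, EL, tau))
        SSE[i, j] = np.sum((data_relax - model) ** 2)   # sum of squared errors

plt.contourf(EL_grid, tau_grid, np.log10(SSE), levels=30)
plt.colorbar(label="log10(SSE)")
plt.xlabel("EL")
plt.ylabel("tau")
plt.title("Error surface of the Stehfest model")
plt.show()

Plotting log10(SSE) over the grid shows how flat or multi-valleyed the error surface is, which explains why one starting guess converges and another does not.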

Scipy has added the differential evolution genetic algorithm to the optimize module, and it can be used to determine the initial parameter estimates for curve_fit(). Scipy's implementation uses the Latin Hypercube algorithm to ensure a thorough search of parameter space, which requires upper and lower bounds within which to search. As you can see below, I have used this scipy module to replace the hard-coded values for the variable named "guess" in your code. This will not run quickly, but a somewhat slower correct result is much better than a fast wrong one. Try this code:

import matplotlib
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import matplotlib.pylab as mplab
import math
from math import *
import numpy as np
import scipy
import scipy.special
from scipy.optimize import curve_fit

from scipy.optimize import differential_evolution

import mpmath as mp

############################################################################################
def invlaplace_stehfest2(time,EL,tau):
    upsilon=0.25
    pmax=6.9
    E0=0.0154
    M=8
    results=[]
    for t in time:
        func=0
        for k in range(1,2*M+1):
            SUM=0
            for j in range(int(math.floor((k+1)/2)),min(k,M)+1):
                dummy=j**(M+1)*scipy.special.binom(M,j)*scipy.special.binom(2*j,j)*scipy.special.binom(j,k-j)/math.factorial(M)
                SUM+=dummy
            s=k*math.log(2)/t
            func+=(-1)**(M+k)*SUM*pmax*EL/(mp.exp(tau*s)*mp.expint(1,tau*s)*E0+EL)/s

        func=func*math.log(2)/t
        results.append(func)
    return  [float(i) for i in results]


############################################################################################    

###Constant###

####Value####
data_time=np.array([69.0,99.0,139.0,179.0,219.0,259.0,295.5,299.03])
data_relax=np.array([6.2536,6.1652,6.0844,6.0253,5.9782,5.9404,5.9104,5.9066])

# function for genetic algorithm to minimize (sum of squared error)
def sumOfSquaredError(parameterTuple):
    return np.sum((data_relax - invlaplace_stehfest2(data_time, *parameterTuple)) ** 2)

###Fitting###
#guess=np.array([0.33226685047281592707364253044085038793404361200072,8.6682623502960394383501102909774397295654841654769])
#guess=np.array([0.1,0.05])

parameterBounds = [[0.0, 1.0], [0.0, 10.0]]
# "seed" the numpy random number generator for repeatable results
# note the ".x" here to return only the parameter estimates in this example
guess = differential_evolution(sumOfSquaredError, parameterBounds, seed=3).x

Parameter,Covariance=scipy.optimize.curve_fit(invlaplace_stehfest2,data_time,data_relax,guess)
print(Parameter)
residu=sum(data_relax-invlaplace_stehfest2(data_time,Parameter[0],Parameter[1]))


Graph_Curves=plt.figure()
ax = Graph_Curves.add_subplot(111)
ax.plot(data_time,invlaplace_stehfest2(data_time,Parameter[0],Parameter[1]),"-")
ax.plot(data_time,data_relax,"o")
plt.show()