我试着用指数衰减曲线拟合信号。我想约束拟合曲线始终位于信号之下。我该如何添加这样的约束？我尝试过在残差函数中加入惩罚项，但拟合效果不好。

这里有一个最小的例子

import matplotlib.pyplot as plt
import numpy as np
from scipy.optimize import curve_fit,leastsq

y = np.array([0.13598974610162404,0.14204518683071268,0.12950580786633123,0.11907324299581903,0.10128368784179803,0.09801605741178761,0.08384607033484785,0.080831165652505,0.08320697432504208,0.0796448643292049,0.08036960780924939,0.07794871929139761,0.06684868128842808,0.08473240868175465,0.12911858937102086,0.2643875667237164,0.35984364939831903,0.2193622531576059,0.11434823952113388,0.07542004424929072,0.05811782617304745,0.05244297390163204,0.046658695718735835,0.04848192538027753,0.04720951580680828,0.043285109240216044,0.04182209865781944,0.039844899409411334,0.03462168053862101,0.03378305258506322,0.03533297573624328,0.03434759644082368,0.033784129758841895,0.030419029760045915,0.028085746545496386,0.02614296782807577,0.024221565132520304,0.022189741126251487,0.02093159168492871,0.02041496822457043,0.021031182865802436,0.024510234374072886,0.023307213889378165,0.0267484745286596,0.02258945483736504,0.014891232218542747,0.01151363712852099,0.010139967470707011,0.009769727537338574,0.009323591440734363,0.008852570111374145,0.008277064263333187,0.007088585763561308,0.00607584327561278,0.005423044957885124,0.005017536008889349,0.005194048550726604,0.005066069823795679,0.004923514285732114,0.0053721924337601975,0.005156078360383089,0.004962157137571195,0.0045958264654801136,0.0043323942880189766,0.004310971039183395,0.004733498071711899,0.005238905827304569,0.005180319290046715,0.0050892994891999395,0.005323200339923676,0.005430819354625569,0.0051261318575094965,0.004608215352126279,0.0042522740751442835,0.003964475580118653,0.004281845094328685,0.003932866994198572,0.003751478035379218,0.003988758544406512,0.00366304957414055,0.0030455636180720283,0.0027753884456863088,0.0025920006620398267,0.00253411154251131,0.0024133671863316246,0.0020164600081521793,0.002294208143652257,0.0021879013667402856,0.00213873257081609,0.0019997327222615736,0.00195034020886016,0.0022503784328324725,0.003038201783164678,0.003603415824772916,0.003642976691503975,0.003263887163622944,0.
0035506429555724373,0.0047798428190157045,0.0040553738896165386,0.002473176007612183,0.0025941258844692236,0.0018292994313265358,0.00209892075806378,0.0023955564365646335,0.0020375114833779307,0.002260575557815427,0.0022985835848993693,0.002099406433733155,0.0018586368200849512,0.0016053613868235123,0.001438613175578214,0.00143049357541102,0.0013095127315154774,0.001262471540939509,0.0013514522407795408,0.001605619634800475,0.001961075896285937,0.001865266816887284,0.0023526578031602017,0.00246341280674717,0.0025884459641316543,0.0025289043233280195,0.0027480853600970576,0.003160811294269662,0.003061310957205347,0.0034708227008575852,0.0027193887970078795,0.0025019043062104967,0.001721602287020676,0.0014938287993981696,0.001379701311142287,0.001482278335951954,0.0017739654977338047,0.0016173740322614279,0.0014568993700072393,0.001561687803455451,0.0016478201019948435,0.001296045775857753,0.001237797494806695,0.0014233100660923912,0.001327643348684166,0.0012058468589450113,0.001326993796471779,0.0015302363900395407,0.0019691433239499958,0.001914607620254396,0.0017054233649494027,0.001999944948934884,0.001586257522693384,0.0017888302317418617,0.0024194552369763127,0.002602486169233071,0.0023322619326367703,0.002188641252143114,0.002160637896948486,0.0017183240941773745,0.0013791696278384316,0.0013010975606518034,0.0012917607493148195,0.0014473287423454842,0.0011277134770190562,0.0009788023156115833,0.0011624520875172602,0.0011529250281587956,0.0011286272690398862,0.0011650110432320925,0.0011670732824154513,0.0012701258601414223,0.0010863631780132393,0.001151403997327795,0.001261531100583112,0.0014433469612850924,0.0012625181480229021,0.0013366719381237742,0.0013129577294860868,0.0010799358566476144,0.0012361331567450533,0.0013155633998451644,0.0017427549165517102,0.0017117554798138019,0.0014424582600283703,0.0014934381441740442,0.001320132472902865,0.0010134949123866623,0.0009392144030905535,0.0008956207514417853,0.0009483482891766875,0.0007118586291810097,0.000657263
3034661715,0.0006246206878692327])
x = np.array([1.1,1.2000000000000002,1.3,1.4000000000000001,1.5,1.6,1.7000000000000002,1.8,1.9000000000000001,2.0,2.1,2.2,2.3000000000000003,2.4000000000000004,2.5,2.6,2.7,2.8000000000000003,2.9000000000000004,3.0,3.1,3.2,3.3000000000000003,3.4000000000000004,3.5,3.6,3.7,3.8000000000000003,3.9000000000000004,4.0,4.1000000000000005,4.2,4.3,4.4,4.5,4.6000000000000005,4.7,4.800000000000001,4.9,5.0,5.1000000000000005,5.2,5.300000000000001,5.4,5.5,5.6000000000000005,5.7,5.800000000000001,5.9,6.0,6.1000000000000005,6.2,6.300000000000001,6.4,6.5,6.6000000000000005,6.7,6.800000000000001,6.9,7.0,7.1000000000000005,7.2,7.300000000000001,7.4,7.5,7.6000000000000005,7.7,7.800000000000001,7.9,8.0,8.1,8.200000000000001,8.3,8.4,8.5,8.6,8.700000000000001,8.8,8.9,9.0,9.1,9.200000000000001,9.3,9.4,9.5,9.600000000000001,9.700000000000001,9.8,9.9,10.0,10.100000000000001,10.200000000000001,10.3,10.4,10.5,10.600000000000001,10.700000000000001,10.8,10.9,11.0,11.100000000000001,11.200000000000001,11.3,11.4,11.5,11.600000000000001,11.700000000000001,11.8,11.9,12.0,12.100000000000001,12.200000000000001,12.3,12.4,12.5,12.600000000000001,12.700000000000001,12.8,12.9,13.0,13.100000000000001,13.200000000000001,13.3,13.4,13.5,13.600000000000001,13.700000000000001,13.8,13.9,14.0,14.100000000000001,14.200000000000001,14.3,14.4,14.5,14.600000000000001,14.700000000000001,14.8,14.9,15.0,15.100000000000001,15.200000000000001,15.3,15.4,15.5,15.600000000000001,15.700000000000001,15.8,15.9,16.0,16.1,16.2,16.3,16.400000000000002,16.5,16.6,16.7,16.8,16.900000000000002,17.0,17.1,17.2,17.3,17.400000000000002,17.5,17.6,17.7,17.8,17.900000000000002,18.0,18.1,18.2,18.3,18.400000000000002,18.5,18.6,18.7,18.8,18.900000000000002,19.0,19.1,19.200000000000003,19.3,19.400000000000002,19.5,19.6,19.700000000000003,19.8,19.900000000000002,20.0])


def funcExp(x, a, b, c):
    """Exponential decay model with constant offset: a * exp(-b * x) + c."""
    decay = np.exp(-b * x)
    return a * decay + c

# Residual function with a one-sided penalization term.
def residuals(p, x, y):
    """Residuals for ``leastsq`` with a one-sided penalty.

    Returns ``min(y - f(x), 0)`` element-wise: points where the model lies
    above the data keep their (negative) error, points where the model lies
    below the data contribute exactly zero.

    The original body computed ``funcExp`` three times and left two unused
    locals (``est``, ``penaliz``); this is the same result computed once.

    NOTE(review): zeroing the residual wherever the model is *under* the
    data removes all gradient there, which is likely why the penalized fit
    looks poor — confirm the intended sign of the penalty.
    """
    diff = y - funcExp(x, p[0], p[1], p[2])
    # diff - max(diff, 0) == min(diff, 0) element-wise.
    return np.minimum(diff, 0)

# Unconstrained reference fit (curve_fit) vs. the penalized leastsq fit.
# Initial guess: first sample as amplitude, unit decay, last sample as offset.
popt, pcov = curve_fit(funcExp, x, y,p0=[y[0], 1, y[-1]])
popt2, pcov2 = leastsq(func=residuals, x0=(y[0], 1, y[-1]), args=(x, y))

# Overlay the data with both fitted curves.
fig, ax = plt.subplots()
ax.plot(x,y )
# red: plain curve_fit result; green: penalized-residual leastsq result
ax.plot(x,funcExp(x, popt[0], popt[1], popt[2]),'r' )
ax.plot(x,funcExp(x, popt2[0], popt2[1], popt2[2]),'g' )
plt.show()

运行结果如下图所示（原帖附图）。

推荐答案

这是一个指数函数，因此无论是最小二乘误差还是绘图，都应使用对数刻度。再配合一个下包络（lower envelope）约束即可，效果很好——

import matplotlib.pyplot as plt
import numpy as np
from scipy.optimize import minimize, Bounds, NonlinearConstraint

y_exper = np.array([0.13598974610162404,0.14204518683071268,0.12950580786633123,0.11907324299581903,0.10128368784179803,0.09801605741178761,0.08384607033484785,0.080831165652505,0.08320697432504208,0.0796448643292049,0.08036960780924939,0.07794871929139761,0.06684868128842808,0.08473240868175465,0.12911858937102086,0.2643875667237164,0.35984364939831903,0.2193622531576059,0.11434823952113388,0.07542004424929072,0.05811782617304745,0.05244297390163204,0.046658695718735835,0.04848192538027753,0.04720951580680828,0.043285109240216044,0.04182209865781944,0.039844899409411334,0.03462168053862101,0.03378305258506322,0.03533297573624328,0.03434759644082368,0.033784129758841895,0.030419029760045915,0.028085746545496386,0.02614296782807577,0.024221565132520304,0.022189741126251487,0.02093159168492871,0.02041496822457043,0.021031182865802436,0.024510234374072886,0.023307213889378165,0.0267484745286596,0.02258945483736504,0.014891232218542747,0.01151363712852099,0.010139967470707011,0.009769727537338574,0.009323591440734363,0.008852570111374145,0.008277064263333187,0.007088585763561308,0.00607584327561278,0.005423044957885124,0.005017536008889349,0.005194048550726604,0.005066069823795679,0.004923514285732114,0.0053721924337601975,0.005156078360383089,0.004962157137571195,0.0045958264654801136,0.0043323942880189766,0.004310971039183395,0.004733498071711899,0.005238905827304569,0.005180319290046715,0.0050892994891999395,0.005323200339923676,0.005430819354625569,0.0051261318575094965,0.004608215352126279,0.0042522740751442835,0.003964475580118653,0.004281845094328685,0.003932866994198572,0.003751478035379218,0.003988758544406512,0.00366304957414055,0.0030455636180720283,0.0027753884456863088,0.0025920006620398267,0.00253411154251131,0.0024133671863316246,0.0020164600081521793,0.002294208143652257,0.0021879013667402856,0.00213873257081609,0.0019997327222615736,0.00195034020886016,0.0022503784328324725,0.003038201783164678,0.003603415824772916,0.003642976691503975,0.003263887163622
944,0.0035506429555724373,0.0047798428190157045,0.0040553738896165386,0.002473176007612183,0.0025941258844692236,0.0018292994313265358,0.00209892075806378,0.0023955564365646335,0.0020375114833779307,0.002260575557815427,0.0022985835848993693,0.002099406433733155,0.0018586368200849512,0.0016053613868235123,0.001438613175578214,0.00143049357541102,0.0013095127315154774,0.001262471540939509,0.0013514522407795408,0.001605619634800475,0.001961075896285937,0.001865266816887284,0.0023526578031602017,0.00246341280674717,0.0025884459641316543,0.0025289043233280195,0.0027480853600970576,0.003160811294269662,0.003061310957205347,0.0034708227008575852,0.0027193887970078795,0.0025019043062104967,0.001721602287020676,0.0014938287993981696,0.001379701311142287,0.001482278335951954,0.0017739654977338047,0.0016173740322614279,0.0014568993700072393,0.001561687803455451,0.0016478201019948435,0.001296045775857753,0.001237797494806695,0.0014233100660923912,0.001327643348684166,0.0012058468589450113,0.001326993796471779,0.0015302363900395407,0.0019691433239499958,0.001914607620254396,0.0017054233649494027,0.001999944948934884,0.001586257522693384,0.0017888302317418617,0.0024194552369763127,0.002602486169233071,0.0023322619326367703,0.002188641252143114,0.002160637896948486,0.0017183240941773745,0.0013791696278384316,0.0013010975606518034,0.0012917607493148195,0.0014473287423454842,0.0011277134770190562,0.0009788023156115833,0.0011624520875172602,0.0011529250281587956,0.0011286272690398862,0.0011650110432320925,0.0011670732824154513,0.0012701258601414223,0.0010863631780132393,0.001151403997327795,0.001261531100583112,0.0014433469612850924,0.0012625181480229021,0.0013366719381237742,0.0013129577294860868,0.0010799358566476144,0.0012361331567450533,0.0013155633998451644,0.0017427549165517102,0.0017117554798138019,0.0014424582600283703,0.0014934381441740442,0.001320132472902865,0.0010134949123866623,0.0009392144030905535,0.0008956207514417853,0.0009483482891766875,0.0007118586291810097,0.000
6572633034661715,0.0006246206878692327])
x_exper = np.array([1.1,1.2000000000000002,1.3,1.4000000000000001,1.5,1.6,1.7000000000000002,1.8,1.9000000000000001,2.0,2.1,2.2,2.3000000000000003,2.4000000000000004,2.5,2.6,2.7,2.8000000000000003,2.9000000000000004,3.0,3.1,3.2,3.3000000000000003,3.4000000000000004,3.5,3.6,3.7,3.8000000000000003,3.9000000000000004,4.0,4.1000000000000005,4.2,4.3,4.4,4.5,4.6000000000000005,4.7,4.800000000000001,4.9,5.0,5.1000000000000005,5.2,5.300000000000001,5.4,5.5,5.6000000000000005,5.7,5.800000000000001,5.9,6.0,6.1000000000000005,6.2,6.300000000000001,6.4,6.5,6.6000000000000005,6.7,6.800000000000001,6.9,7.0,7.1000000000000005,7.2,7.300000000000001,7.4,7.5,7.6000000000000005,7.7,7.800000000000001,7.9,8.0,8.1,8.200000000000001,8.3,8.4,8.5,8.6,8.700000000000001,8.8,8.9,9.0,9.1,9.200000000000001,9.3,9.4,9.5,9.600000000000001,9.700000000000001,9.8,9.9,10.0,10.100000000000001,10.200000000000001,10.3,10.4,10.5,10.600000000000001,10.700000000000001,10.8,10.9,11.0,11.100000000000001,11.200000000000001,11.3,11.4,11.5,11.600000000000001,11.700000000000001,11.8,11.9,12.0,12.100000000000001,12.200000000000001,12.3,12.4,12.5,12.600000000000001,12.700000000000001,12.8,12.9,13.0,13.100000000000001,13.200000000000001,13.3,13.4,13.5,13.600000000000001,13.700000000000001,13.8,13.9,14.0,14.100000000000001,14.200000000000001,14.3,14.4,14.5,14.600000000000001,14.700000000000001,14.8,14.9,15.0,15.100000000000001,15.200000000000001,15.3,15.4,15.5,15.600000000000001,15.700000000000001,15.8,15.9,16.0,16.1,16.2,16.3,16.400000000000002,16.5,16.6,16.7,16.8,16.900000000000002,17.0,17.1,17.2,17.3,17.400000000000002,17.5,17.6,17.7,17.8,17.900000000000002,18.0,18.1,18.2,18.3,18.400000000000002,18.5,18.6,18.7,18.8,18.900000000000002,19.0,19.1,19.200000000000003,19.3,19.400000000000002,19.5,19.6,19.700000000000003,19.8,19.900000000000002,20.0])


def funcExp(x: np.ndarray, a: float, b: float, c: float) -> np.ndarray:
    """Offset exponential decay, ``a * exp(-b*x) + c``."""
    exponent = -b * x
    return a * np.exp(exponent) + c


def log_residuals(params: np.ndarray) -> float:
    """Squared error between model and data in log space.

    Reads the module-level ``x_exper``/``y_exper`` arrays; returns the
    scalar least-squares objective for ``minimize``.
    """
    model = funcExp(x_exper, *params)
    diff = np.log(model) - np.log(y_exper)
    return diff.dot(diff)  # sum of squared log-residuals


def lower_envelope(params: np.ndarray) -> np.ndarray:
    """Constraint vector: non-negative wherever the model stays below the data.

    Uses the module-level ``x_exper``/``y_exper``; paired with
    ``NonlinearConstraint(lb=0, ub=inf)`` this forces the fit under the signal.
    """
    model = funcExp(x_exper, *params)
    return y_exper - model


# Initial guess: back-project the first sample to x = 0 for the amplitude,
# unit decay rate, last sample as the constant offset.
x0 = y_exper[0]*np.exp(x_exper[0]), 1, y_exper[-1]
result = minimize(
    fun=log_residuals, x0=x0,
    # Loose box bounds on (a, b, c) to keep the optimizer in a sane region.
    bounds=Bounds(
        lb=(0.01, 0.1, -0.1),
        ub=(0.5, 20, 0.1),
    ),
    # y_exper - model >= 0 everywhere: the fitted curve must stay under the data.
    constraints=NonlinearConstraint(
        fun=lower_envelope, lb=0, ub=np.inf,
    ),
)
assert result.success, result.message

print(result.x)

# Log-scale plot: data, initial guess, and constrained fit.
fig, ax = plt.subplots()
ax.semilogy(x_exper, y_exper, label='experiment')
ax.semilogy(x_exper, funcExp(x_exper, *x0), label='guess')
ax.semilogy(x_exper, funcExp(x_exper, *result.x), label='fit')
ax.legend()
plt.show()
[0.2157369 0.5899542 0.000623 ]

envelope fit

如果有一个更合理的初始猜测和定义的Jacobian,这将表现得更好:

from functools import partial

import matplotlib.pyplot as plt
import numpy as np
from scipy.optimize import check_grad, minimize, Bounds, NonlinearConstraint


def load_data() -> tuple[np.ndarray, np.ndarray]:
    """Return the experimental samples as ``(x_exper, y_exper)``.

    ``x_exper`` is a uniform grid from 1.1 to 20.0 in steps of 0.1
    (190 points); ``y_exper`` holds the matching measured amplitudes,
    hard-coded below.
    """
    x_exper = np.arange(1.1, 20.05, 0.1)
    y_exper = np.array([
        0.13598974610162404, 0.14204518683071268, 0.12950580786633123, 0.11907324299581903,
        0.10128368784179803, 0.09801605741178761, 0.08384607033484785, 0.080831165652505,
        0.08320697432504208, 0.0796448643292049, 0.08036960780924939, 0.07794871929139761,
        0.06684868128842808, 0.08473240868175465, 0.12911858937102086, 0.2643875667237164,
        0.35984364939831903, 0.2193622531576059, 0.11434823952113388, 0.07542004424929072,
        0.05811782617304745, 0.05244297390163204, 0.046658695718735835, 0.04848192538027753,
        0.04720951580680828, 0.043285109240216044, 0.04182209865781944, 0.039844899409411334,
        0.03462168053862101, 0.03378305258506322, 0.03533297573624328, 0.03434759644082368,
        0.033784129758841895, 0.030419029760045915, 0.028085746545496386, 0.02614296782807577,
        0.024221565132520304, 0.022189741126251487, 0.02093159168492871, 0.02041496822457043,
        0.021031182865802436, 0.024510234374072886, 0.023307213889378165, 0.0267484745286596,
        0.02258945483736504, 0.014891232218542747, 0.01151363712852099, 0.010139967470707011,
        0.009769727537338574, 0.009323591440734363, 0.008852570111374145, 0.008277064263333187,
        0.007088585763561308, 0.00607584327561278, 0.005423044957885124, 0.005017536008889349,
        0.005194048550726604, 0.005066069823795679, 0.004923514285732114, 0.0053721924337601975,
        0.005156078360383089, 0.004962157137571195, 0.0045958264654801136, 0.0043323942880189766,
        0.004310971039183395, 0.004733498071711899, 0.005238905827304569, 0.005180319290046715,
        0.0050892994891999395, 0.005323200339923676, 0.005430819354625569, 0.0051261318575094965,
        0.004608215352126279, 0.0042522740751442835, 0.003964475580118653, 0.004281845094328685,
        0.003932866994198572, 0.003751478035379218, 0.003988758544406512, 0.00366304957414055,
        0.0030455636180720283, 0.0027753884456863088, 0.0025920006620398267, 0.00253411154251131,
        0.0024133671863316246, 0.0020164600081521793, 0.002294208143652257, 0.0021879013667402856,
        0.00213873257081609, 0.0019997327222615736, 0.00195034020886016, 0.0022503784328324725,
        0.003038201783164678, 0.003603415824772916, 0.003642976691503975, 0.003263887163622944,
        0.0035506429555724373, 0.0047798428190157045, 0.0040553738896165386, 0.002473176007612183,
        0.0025941258844692236, 0.0018292994313265358, 0.00209892075806378, 0.0023955564365646335,
        0.0020375114833779307, 0.002260575557815427, 0.0022985835848993693, 0.002099406433733155,
        0.0018586368200849512, 0.0016053613868235123, 0.001438613175578214, 0.00143049357541102,
        0.0013095127315154774, 0.001262471540939509, 0.0013514522407795408, 0.001605619634800475,
        0.001961075896285937, 0.001865266816887284, 0.0023526578031602017, 0.00246341280674717,
        0.0025884459641316543, 0.0025289043233280195, 0.0027480853600970576, 0.003160811294269662,
        0.003061310957205347, 0.0034708227008575852, 0.0027193887970078795, 0.0025019043062104967,
        0.001721602287020676, 0.0014938287993981696, 0.001379701311142287, 0.001482278335951954,
        0.0017739654977338047, 0.0016173740322614279, 0.0014568993700072393, 0.001561687803455451,
        0.0016478201019948435, 0.001296045775857753, 0.001237797494806695, 0.0014233100660923912,
        0.001327643348684166, 0.0012058468589450113, 0.001326993796471779, 0.0015302363900395407,
        0.0019691433239499958, 0.001914607620254396, 0.0017054233649494027, 0.001999944948934884,
        0.001586257522693384, 0.0017888302317418617, 0.0024194552369763127, 0.002602486169233071,
        0.0023322619326367703, 0.002188641252143114, 0.002160637896948486, 0.0017183240941773745,
        0.0013791696278384316, 0.0013010975606518034, 0.0012917607493148195, 0.0014473287423454842,
        0.0011277134770190562, 0.0009788023156115833, 0.0011624520875172602, 0.0011529250281587956,
        0.0011286272690398862, 0.0011650110432320925, 0.0011670732824154513, 0.0012701258601414223,
        0.0010863631780132393, 0.001151403997327795, 0.001261531100583112, 0.0014433469612850924,
        0.0012625181480229021, 0.0013366719381237742, 0.0013129577294860868, 0.0010799358566476144,
        0.0012361331567450533, 0.0013155633998451644, 0.0017427549165517102, 0.0017117554798138019,
        0.0014424582600283703, 0.0014934381441740442, 0.001320132472902865, 0.0010134949123866623,
        0.0009392144030905535, 0.0008956207514417853, 0.0009483482891766875, 0.0007118586291810097,
        0.0006572633034661715, 0.0006246206878692327])
    return x_exper, y_exper


def func_exp(x: np.ndarray, a: float, b: float, c: float) -> np.ndarray:
    """Offset exponential decay model: ``a * exp(-b * x) + c``."""
    decay = np.exp(-b * x)
    return a * decay + c


def log_residuals(
    params: np.ndarray, x_exper: np.ndarray, y_exper: np.ndarray,
) -> float:
    """Scalar least-squares objective: squared log-space residuals.

    Comparing logs weights all decades of the decay equally, which is
    what makes the exponential fit well across the full range.
    """
    model = func_exp(x_exper, *params)
    diff = np.log(y_exper) - np.log(model)
    return diff.dot(diff)  # sum of squares


def jac_residuals(
    params: np.ndarray, x: np.ndarray, y: np.ndarray,
) -> tuple[float, float, float]:
    """Analytic gradient of ``log_residuals`` with respect to (a, b, c).

    With f = a*exp(-b*x) + c, uses the identity
    exp(-b*x)/f == 1/(c*exp(b*x) + a) to keep the per-point terms tidy.
    """
    a, b, c = params
    model = a*np.exp(-b*x) + c      # f(x)
    denom = c*np.exp(b*x) + a       # f(x) / exp(-b*x)
    log_ratio = np.log(y/model)     # per-point log residual

    per_point = 2*np.stack((
        np.log(model/y)/denom,      # dL/da
        a*x*log_ratio/denom,        # dL/db
        -log_ratio/model,           # dL/dc
    ))

    # Sum the per-point contributions into the three gradient components.
    return per_point.sum(axis=1)


def lower_envelope(params: np.ndarray, x: np.ndarray) -> np.ndarray:
    # Constraint function for NonlinearConstraint: returns the model values,
    # which `solve` bounds from above by y_exper so the fit stays under the data.
    return func_exp(x, *params)


def jac_lower_envelope(
    params: np.ndarray, x: np.ndarray,
) -> np.ndarray:
    """Jacobian of the envelope constraint: row i is d f(x_i)/d(a, b, c)."""
    a, b, _ = params
    decay = np.exp(-b*x)

    # (n, 3): columns are the partials w.r.t. a, b, c respectively.
    return np.column_stack((
        decay,
        -a*x*decay,
        np.ones_like(x),
    ))


def estimate(x_exper: np.ndarray, y_exper: np.ndarray) -> tuple[float, float, float]:
    """Heuristic initial guess (a0, b0, c0) that starts at or below the data.

    c0 is the smallest sample, b0 a fixed decay rate, and a0 the smallest
    amplitude implied by the remaining points (the last point is skipped,
    where y - c0 can be exactly zero).
    """
    b0 = 0.5
    c0 = y_exper.min()
    implied_amplitude = (y_exper - c0) * np.exp(b0 * x_exper)
    a0 = implied_amplitude[:-1].min()
    return a0, b0, c0


def solve(
    x_exper: np.ndarray,
    y_exper: np.ndarray,
    x0: tuple[float, float, float],
) -> np.ndarray:
    """Fit (a, b, c) by constrained minimization of the log-space residuals.

    Verifies both analytic Jacobians against finite differences, then runs
    ``minimize`` with box bounds and the lower-envelope constraint
    (model <= y_exper everywhere). Raises ValueError if the optimizer fails.
    """
    # Relative gradient check for the objective: check_grad returns an
    # absolute error, so normalize by the gradient magnitude at x0.
    error = check_grad(
        log_residuals, jac_residuals, x0, x_exper, y_exper,
    ) / np.abs(jac_residuals(x0, x_exper, y_exper)).sum()
    assert error < 1e-4

    # Gradient check for the constraint Jacobian (absolute tolerance).
    error = check_grad(
        lower_envelope, jac_lower_envelope, x0, x_exper,
    )
    assert error < 1e-6

    result = minimize(
        fun=log_residuals, jac=jac_residuals,
        args=(x_exper, y_exper), x0=x0,
        # Loose box bounds keeping (a, b, c) in a physically sane region.
        bounds=Bounds(
            lb=(0.01, 0.1, -0.1),
            ub=(0.5, 20, 0.1),
        ),
        # model(x) <= y_exper element-wise: the fit must stay under the data.
        constraints=NonlinearConstraint(
            fun=partial(lower_envelope, x=x_exper),
            # confuses the optimizer; cannot make any progress
            # jac=partial(jac_lower_envelope, x=x_exper),
            lb=-np.inf, ub=y_exper,
        ),
    )
    if not result.success:
        raise ValueError(result.message)
    return result.x


def plot(
    x_exper: np.ndarray,
    y_exper: np.ndarray,
    x0: tuple[float, float, float],
    xopt: np.ndarray,
) -> plt.Figure:
    """Log-scale overlay of the data, the initial guess, and the fitted curve."""
    fig, ax = plt.subplots()
    # None marks the raw data series; tuples are model parameter sets.
    for params, tag in ((None, 'experiment'), (x0, 'guess'), (xopt, 'fit')):
        curve = y_exper if params is None else func_exp(x_exper, *params)
        ax.semilogy(x_exper, curve, label=tag)
    ax.legend()
    return fig


def main() -> None:
    """Load the data, run the constrained fit, then report and plot it."""
    x_data, y_data = load_data()
    guess = estimate(x_data, y_data)
    best = solve(x_data, y_data, guess)
    print(best)
    plot(x_data, y_data, guess, best)
    plt.show()


# Run the demo only when executed as a script, not on import.
if __name__ == '__main__':
    main()
[0.21573704 0.5899543  0.000623  ]

better guess

Python相关问答推荐

Python中MongoDB的BSON时间戳

无法使用equals_html从网址获取全文

列表上值总和最多为K(以O(log n))的最大元素数

对某些列的总数进行民意调查,但不单独列出每列

如何根据参数推断对象的返回类型?

将两只Pandas rame乘以指数

scikit-learn导入无法导入名称METRIC_MAPPING64'

按顺序合并2个词典列表

如何让程序打印新段落上的每一行?

给定高度约束的旋转角解析求解

(Python/Pandas)基于列中非缺失值的子集DataFrame

在代码执行后关闭ChromeDriver窗口

如何在Python请求中组合多个适配器?

在用于Python的Bokeh包中设置按钮的样式

如何过滤组s最大和最小行使用`transform`'

如何从比较函数生成ngroup?

当HTTP 201响应包含 Big Data 的POST请求时,应该是什么?  

获取git修订版中每个文件的最后修改时间的最有效方法是什么?

Polars表达式无法访问中间列创建表达式

Django更新视图未更新