In my program for finding the parameters of a function, scipy.optimize.curve_fit finds the optimal values without any problem.
import math
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
data = pd.read_csv('at_5.csv', index_col='index')
x_data = np.array(data['x'])
y_data = np.array(data['y'])
plt.scatter(x_data, y_data, c='blue')
def log_function(x, k, h, xo, a, ang, gamma):
    return k * np.log(((((x - xo) - a * math.cos(ang)) ** 2) + (h - a * math.sin(ang)) ** 2) /
                      ((((x - xo) + a * math.cos(ang)) ** 2) + (h + a * math.sin(ang)) ** 2)) + gamma

guess = [75,                # k
         80,                # h
         550,               # xo
         25,                # a
         math.radians(10),  # angle
         0                  # gamma
         ]
guesser = log_function(x_data, guess[0], guess[1], guess[2], guess[3], guess[4], guess[5])
plt.plot(x_data, guesser, color='green')
popt, cov = curve_fit(log_function, x_data, y_data, p0=guess, maxfev=10000)
print(cov)
a, b, c, d, e, f = popt
print(f'''K = {a}, h = {b}, Xo = {c}, a = {d} and angle = {math.degrees(e)}, gamma = {f}''')
y_model = log_function(x_data, a, b, c, d, e, f)
plt.plot(x_data, y_model, color='red')
plt.xlabel('X')
plt.ylabel('V(X)')
plt.legend(['Data', 'Guess', 'Matched Data'], loc='lower right')
plt.show()
However, I would also like to know the sum of the squared residuals for every iteration of the parameters. Is there any way to get it, or at least to see the different sets of parameters the function ran through?
I tried scouring the internet first, with no answers. I also looked into the source code, but with my limited knowledge of programming I couldn't find anything. If it is available in the source, please point me in its direction.
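For reference, this is the quantity I mean, computed just once for the final fitted parameters (a minimal sketch reusing popt and log_function from the code above):
residuals = y_data - log_function(x_data, *popt)  # residuals of the final fit
ssr = np.sum(residuals ** 2)                      # sum of squared residuals
print(f'final SSR = {ssr}')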
I don't think there is a built-in way to do it, but you can create a decorator that wraps the model function and reports the sum of squared residuals every time curve_fit evaluates it.
import numpy as np
from scipy.optimize import curve_fit
rng = np.random.default_rng(42)
x_data = np.arange(10)
y_data = 5*np.exp(-1.2*x_data) - 0.4 + rng.random(size=(len(x_data)))-0.5
def residual_monitor(y):
    def decorator(func):
        def wrapped(*args, **kwargs):
            result = func(*args, **kwargs)
            # only report when the model was evaluated on the fitting data,
            # so the later call on the dense plotting grid stays silent
            if result.shape == y.shape:
                residual = np.sum((y - result)**2)
                print(f"Residual: {residual}")
            return result
        return wrapped
    return decorator

@residual_monitor(y_data)
def f(x, a, b, c):
    return a*np.exp(-b*x) + c
guess = [1, 1, 1]
popt, cov = curve_fit(f, x_data, y_data, p0=guess)
x = np.linspace(x_data.min(), x_data.max(), 100)
y = f(x, *popt)
Output:
Residual: 21.93056649410895
Residual: 21.93056649410895
Residual: 21.93056649410895
...
Residual: 0.8082138858016964
Residual: 0.8082138858048424
Residual: 0.8082138823699341
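If you want the parameter values alongside each residual, and would rather keep them for later inspection than print them, a small variation stores every evaluation in a list. This is just a sketch in the same spirit as the decorator above; history is an illustrative name, not anything curve_fit provides.
import numpy as np
from scipy.optimize import curve_fit

rng = np.random.default_rng(42)
x_data = np.arange(10)
y_data = 5*np.exp(-1.2*x_data) - 0.4 + rng.random(size=len(x_data)) - 0.5

history = []  # (parameters, sum of squared residuals) for every evaluation on the data

def f(x, a, b, c):
    result = a*np.exp(-b*x) + c
    if np.shape(result) == y_data.shape:  # skip evaluations on other grids
        history.append(((a, b, c), np.sum((y_data - result)**2)))
    return result

guess = [1, 1, 1]
popt, cov = curve_fit(f, x_data, y_data, p0=guess)

for params, ssr in history:
    print(params, ssr)
Note that curve_fit also evaluates the model with tiny parameter perturbations to approximate the Jacobian by finite differences, which is why the output above ends with several nearly identical residuals; the recorded history includes those evaluations as well.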