function [x_vals, f_vals, k] = method_lev_mar(f, grad_f, hessian_f, e, xk, tol, max_iter, mode)
% f:         Objective function
% grad_f:    Gradient of the function
% hessian_f: Hessian of the function
% e:         Offset for Hessian damping Hk' = Hk + m*I
%            - when:  Hk is not positive definite
%            - where: m = abs(min(eig(Hk))) + e
% xk:        Initial point [xk, yk]
% tol:       Tolerance for the stopping criterion
% max_iter:  Maximum number of iterations
% mode:      Step-size rule: 'armijo', 'minimized', or 'fixed'
% x_vals:    Vector with the (x,y) values until the minimum
% f_vals:    Vector with the f(x,y) values until the minimum
% k:         Number of iterations

% Select the step-size (gamma) rule
if strcmp(mode, 'armijo')
    gamma_f = @(f, grad_f, dk, xk) gamma_armijo(f, grad_f, dk, xk);
elseif strcmp(mode, 'minimized')
    gamma_f = @(f, grad_f, dk, xk) gamma_minimized(f, grad_f, dk, xk);
else % mode == 'fixed'
    gamma_f = @(f, grad_f, dk, xk) gamma_fixed(f, grad_f, dk, xk);
end

x_vals = xk;                    % Store iterates
f_vals = f(xk(1), xk(2));       % Store function values

for k = 1:max_iter
    grad = grad_f(xk(1), xk(2));

    % Check for convergence
    if norm(grad) < tol
        break;
    end

    hess = hessian_f(xk(1), xk(2));

    % Damping term: zero unless the Hessian has to be shifted
    mI = zeros(size(hess));

    % Check whether the Hessian is not positive definite
    lmin = min(eig(hess));
    if lmin <= 0
        % Select m with offset e to steer the eigenvalues of hess positive
        m = abs(lmin) + e;
        mI = m * eye(size(hess));
        if min(eig(hess + mI)) <= 0 % Fail-check
            warning('Could not make the Hessian positive definite.');
        end
    end

    % Solve for the search direction using the (damped) Newton step
    dk = -(hess + mI) \ grad;

    % Calculate the step size gamma
    gk = gamma_f(f, grad_f, dk, xk);

    x_next = xk + gk * dk';             % Update step
    f_next = f(x_next(1), x_next(2));

    xk = x_next;                        % Update point
    x_vals = [x_vals; x_next];          % Store values
    f_vals = [f_vals; f_next];          % Store function values
end
end
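
% ------------------------------------------------------------------
% Usage sketch (illustrative only, not part of the original file):
% minimizing the Rosenbrock function with this routine. It assumes the
% step-size helpers gamma_armijo / gamma_minimized / gamma_fixed
% referenced above exist elsewhere in the project. Note that the
% gradient must return a column vector, since dk' is used in the
% update. Call it from a separate script, e.g.:
%
%   f = @(x, y) (1 - x)^2 + 100*(y - x^2)^2;
%   g = @(x, y) [-2*(1 - x) - 400*x*(y - x^2); 200*(y - x^2)];   % column gradient
%   H = @(x, y) [1200*x^2 - 400*y + 2, -400*x; -400*x, 200];
%   [xs, fs, iters] = method_lev_mar(f, g, H, 1e-3, [-1.2, 1], 1e-6, 500, 'armijo');
% ------------------------------------------------------------------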