THMMY's "Optimization Techniques" course assignments.

method_steepest_descent.m

function [x_vals, f_vals, k] = method_steepest_descent(f, grad_f, xk, tol, max_iter, mode)
% f:        Objective function handle f(x, y)
% grad_f:   Gradient of the objective, grad_f(x, y)
% xk:       Initial point [x0, y0]
% tol:      Tolerance for the stopping criterion (gradient norm)
% max_iter: Maximum number of iterations
% mode:     Step-size rule: 'armijo', 'minimized', or 'fixed'
% x_vals:   Iterates (x, y) up to the minimum, one row per iteration
% f_vals:   Objective values f(x, y) at each iterate
% k:        Number of iterations performed

    % Select the step-size (gamma) rule
    if strcmp(mode, 'armijo')
        gamma_f = @(f, grad_f, dk, xk) gamma_armijo(f, grad_f, dk, xk);
    elseif strcmp(mode, 'minimized')
        gamma_f = @(f, grad_f, dk, xk) gamma_minimized(f, grad_f, dk, xk);
    else % mode == 'fixed'
        gamma_f = @(f, grad_f, dk, xk) gamma_fixed(f, grad_f, dk, xk);
    end

    % Storage for the iterates, beginning with the initial point
    x_vals = xk;
    f_vals = f(xk(1), xk(2));

    for k = 1:max_iter
        grad = grad_f(xk(1), xk(2));

        % Check for convergence
        if norm(grad) < tol
            break;
        end

        dk = -grad;                          % Steepest descent direction
        gk = gamma_f(f, grad_f, dk, xk);     % Step size for this iteration

        x_next = xk + gk * dk';              % Update step
        f_next = f(x_next(1), x_next(2));

        xk = x_next;                         % Move to the new point
        x_vals = [x_vals; x_next];           % Store the new iterate
        f_vals = [f_vals; f_next];           % Store the new objective value
    end
end
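The three step-size helpers (gamma_fixed, gamma_armijo, gamma_minimized) are called above but defined elsewhere in the repository and are not shown here. The following is only a minimal sketch of what each rule typically looks like, matching the call signature gamma_f(f, grad_f, dk, xk) used above; the constants (fixed step, Armijo parameters, search bracket) are assumptions, not the repository's values. In practice each function would live in its own .m file.

% NOTE: sketches only, not the repo's implementations; all constants are assumptions.

function gk = gamma_fixed(~, ~, ~, ~)
    % Constant step size (the value 0.01 is an assumption).
    gk = 0.01;
end

function gk = gamma_armijo(f, ~, dk, xk)
    % Backtracking line search satisfying the Armijo sufficient-decrease condition.
    alpha = 1e-3;    % sufficient-decrease constant (assumed)
    beta  = 0.5;     % backtracking factor (assumed)
    gk    = 1;       % initial trial step (assumed)
    fk    = f(xk(1), xk(2));
    % For steepest descent dk = -grad, so the directional derivative is -||dk||^2.
    slope = -(dk(:)' * dk(:));
    while f(xk(1) + gk * dk(1), xk(2) + gk * dk(2)) > fk + alpha * gk * slope && gk > 1e-12
        gk = beta * gk;   % shrink the step until sufficient decrease holds
    end
end

function gk = gamma_minimized(f, ~, dk, xk)
    % Exact line search: minimize phi(g) = f(xk + g*dk) over an assumed bracket [0, 5].
    phi = @(g) f(xk(1) + g * dk(1), xk(2) + g * dk(2));
    gk  = fminbnd(phi, 0, 5);
end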
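A minimal usage sketch, assuming the step-size helpers above (or the repo's own gamma_*.m files) are on the MATLAB path. The quadratic objective, starting point, tolerance, and iteration cap are illustrative choices, not taken from the assignment.

% Minimize f(x,y) = x^2 + 2*y^2 from (2, 2) with Armijo backtracking (example values).
f      = @(x, y) x.^2 + 2*y.^2;
grad_f = @(x, y) [2*x; 4*y];     % column gradient, so dk' matches the row-vector point xk
x0     = [2, 2];                 % initial point as a row vector
[x_vals, f_vals, k] = method_steepest_descent(f, grad_f, x0, 1e-6, 1000, 'armijo');
fprintf('Stopped after %d iterations at (%.4f, %.4f), f = %.2e\n', ...
        k, x_vals(end, 1), x_vals(end, 2), f_vals(end));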