THMMY's "Optimization Techniques" course assignments.
You can't add more than 25 topics. A topic must start with a letter or number, can include hyphens ('-'), and can be up to 35 characters long.

method_SteepDesc_Proj.m 1.8 KiB

  1. function [x_vals, f_vals, k] = method_SteepDesc_Proj(f, grad_f, xk, sk, limmits, tol, max_iter, mode)
  2. % f: Objective function
  3. % grad_f: Gradient of the function
  4. % xk: Initial point [x0; y0]
  5. % sk: Step size (fixed positive scalar)
  6. % limits: Bounds of the feasible set for each dimension
  7. % tol: Tolerance for stopping criterion
  8. % max_iter: Maximum number of iterations
  9. % x_vals: Vector with the (x,y) values until minimum
  10. % f_vals: Vector with f(x,y) values until minimum
  11. % k: Number of iterations
  12. if strcmp(mode, 'armijo') == 1
  13. gamma_f = @(f, grad_f, dk, xk) gamma_armijo(f, grad_f, dk, xk);
  14. elseif strcmp(mode, 'minimized') == 1
  15. gamma_f = @(f, grad_f, dk, xk) gamma_minimized(f, grad_f, dk, xk);
  16. else % mode == 'fixed'
  17. gamma_f = @(f, grad_f, dk, xk) gamma_fixed(f, grad_f, dk, xk);
  18. end
  19. % Project the first point if needed
  20. xk = ProjectionPoint(xk, limmits);
  21. % Storage for iterations, begin with the first point
  22. x_vals = xk;
  23. f_vals = f(xk);
  24. for k = 1:max_iter
  25. % Check for convergence
  26. if norm(grad_f(xk)) < tol
  27. break;
  28. end
  29. dk = - grad_f(xk); % Steepset descent direction
  30. % First calculate xk-bar and project it if nessesary
  31. xkbar = xk + sk * dk;
  32. xkbar = ProjectionPoint(xkbar, limmits);
  33. dk = (xkbar - xk); % Steepest descent projection direction
  34. gk = gamma_f(f, grad_f, dk, xk); % Calculate gamma
  35. x_next = xk + gk * dk; % Update step
  36. f_next = f(x_next);
  37. xk = x_next; % Update point
  38. x_vals = [x_vals, x_next]; % Store values
  39. f_vals = [f_vals, f_next]; % Store function values
  40. end
  41. end