THMMY's "Optimization Techniques" course assignments.
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

steepest_descent.m 1.3 KiB

18 hours ago
1234567891011121314151617181920212223242526272829303132333435363738394041424344
  1. function [x_vals, f_vals, k] = steepest_descent(f, grad_f, x0, tol, max_iter, mode)
  2. % f: Objective function
  3. % grad_f: Gradient of the function
  4. % x0: Initial point [x0, y0]
  5. % tol: Tolerance for stopping criterion
  6. % max_iter: Maximum number of iterations
  7. % x_vals: Vector with the (x,y) values until minimum
  8. % f_vals: Vector with f(x,y) values until minimum
  9. % k: Number of iterations
  10. if strcmp(mode, 'armijo') == 1
  11. gamma_f = @(f, grad_f, x0) gamma_armijo(f, grad_f, x0);
  12. elseif strcmp(mode, 'minimized') == 1
  13. gamma_f = @(f, grad_f, x0) gamma_minimized(f, grad_f, x0);
  14. else % mode == 'fixed'
  15. gamma_f = @(f, grad_f, x0) gamma_fixed(f, grad_f, x0);
  16. end
  17. % Storage for iterations, begin with the first point
  18. x_vals = x0;
  19. f_vals = f(x0(1), x0(2));
  20. for k = 1:max_iter
  21. grad = grad_f(x0(1), x0(2));
  22. % Check for convergence
  23. if norm(grad) < tol
  24. break;
  25. end
  26. dk = - grad;
  27. % Calculate gamma
  28. gk = gamma_f(f, grad_f, x0);
  29. x_next = x0 + gk * dk'; % Update step
  30. f_next = f(x_next(1), x_next(2));
  31. x0 = x_next; % Update point
  32. x_vals = [x_vals; x_next]; % Store values
  33. f_vals = [f_vals; f_next]; % Store function values
  34. end
  35. end