// RELEASE 1
// #############################################################################
// Goal of this routine: test some of the local-minimum optimization solvers
// implemented in Scilab (bounded and unbounded ones):
//   - direct methods such as Nelder-Mead           => neldermead / fminsearch
//   - indirect (quasi-)Newton methods (e.g. BFGS)  => optim solver
//   - dedicated quadratic cost-function programming => qpsolve solver
//
// Global search using Genetic Algorithms or Simulated Annealing will be
// tested in a further release.

// Test-case choice:
// CHOICE_SOLVER = 1  => Nelder-Mead solver with no bounds ==> fminsearch
// CHOICE_SOLVER = 2  => Nelder-Mead solver with bounds
// CHOICE_SOLVER = 3  => optim solver, no bounds - gradient explicitly defined
// CHOICE_SOLVER = 4  => optim solver, no bounds - gradient via derivative macro
// CHOICE_SOLVER = 5  => optim solver, no bounds - gradient via numdiff macro
// CHOICE_SOLVER = 6  => optim solver, bounds - gradient & Hessian via numdiff macro
// CHOICE_SOLVER = 7  => qpsolve with no bounds
// CHOICE_SOLVER = 8  => qpsolve with bounds
// CHOICE_SOLVER = 9  => Newton method, gradient & Hessian explicitly calculated
// CHOICE_SOLVER = 10 => Newton method using derivative and/or numdiff macros
// and so on
CHOICE_SOLVER = 2;

// Objective-function selection (2 parameters) & 3D plot:
// - CHOICE_FUNCTION = 1 => basic quadratic function Y = X(1)^2 + X(2)^2
// - CHOICE_FUNCTION = 2 => a "saddle" function, i.e. a saddle point exists
// - CHOICE_FUNCTION = 3 => non-quadratic Rosenbrock function
//                          Y = 100*(X(2) - X(1)^2)^2 + (1 - X(1))^2
// - CHOICE_FUNCTION = 4 => non-quadratic Rastrigin-like function:
//                          a single global minimum & various local minima
CHOICE_FUNCTION = 3;

// Nota: comments concerning the optim macro.
// The ind input argument is a message sent from the solver to the cost
// function so that the cost function knows what to compute:
//   ind = 2 => costf must provide f
//   ind = 3 => costf must provide g
//   ind = 4 => costf must provide f and g
//   ind = 1 => costf can print messages
// ##########################################################################

select CHOICE_FUNCTION
case 1
    // Basic quadratic function - surface plot of the general definition
    clf()
    function [z] = myfunction_plot(x, y)
        z = x^2 + y^2;
    endfunction
    x = [-1:0.1:1]';
    y = x;
    z = feval(x, y, myfunction_plot);
    plot3d(x, y, z)

    // Quadratic cost function, optim-style signature [f,g,ind] = costf(x,ind)
    function [f, G, ind] = myfunction(x, ind)
        f = x(1)^2 + x(2)^2;
        // Gradient vector G
        G(1) = 2*x(1);
        G(2) = 2*x(2);
        // Hessian matrix (quadratic function => H is a constant matrix).
        // Not returned: reserved for the future Newton cases (9-10).
        H = [2 0 ; 0 2];
    endfunction

    // Initial parameters & bounds definition
    initial_parameters = [-1.2 1.0];
    lower_bounds = [-2 -2];
    upper_bounds = [2. 2.];

case 2
    // Saddle function - surface plot of the general definition
    clf()
    function [z] = myfunction_plot(x, y)
        z = x^2 - y^2
    endfunction
    x = [-1:0.1:1]';
    y = x;
    z = feval(x, y, myfunction_plot);
    plot3d(x, y, z)

    // Saddle quadratic cost function (no interior minimum: any minimum
    // found by a bounded solver lies on the boundary)
    function [f, G, ind] = myfunction(x, ind)
        f = x(1)^2 - x(2)^2;
        // Gradient vector G
        G(1) = 2*x(1);
        G(2) = -2*x(2);
        // Hessian matrix (quadratic function => H is a constant matrix).
        // Not returned: reserved for the future Newton cases (9-10).
        H = [2 0 ; 0 -2];
    endfunction

    // Initial parameters & bounds definition
    initial_parameters = [-1.2 1.0];
    lower_bounds = [-2 -2];
    upper_bounds = [2. 2.];

case 3
    // Rosenbrock function - surface plot of the general definition
    clf()
    function [z] = myfunction_plot(x, y)
        z = 100.0*(y - x^2)^2 + (1 - x)^2;
    endfunction
    x = [-1.2:0.1:1]';
    y = x;
    z = feval(x, y, myfunction_plot);
    plot3d(x, y, z, theta = 61, alpha = 1.8)

    // Rosenbrock cost function
    function [f, G, ind] = myfunction(x, ind)
        f = 100*(x(2) - x(1)^2)^2 + (1 - x(1))^2;
        // Gradient vector G
        G(1) = -400*x(1)*(x(2) - x(1)^2) - 2*(1 - x(1));
        G(2) = 200*(x(2) - x(1)^2);
        // Hessian matrix.
        // Not returned: reserved for the future Newton cases (9-10).
        H(1,1) = -400*x(2) + 1200*x(1)^2 + 2;
        H(1,2) = -400*x(1);
        H(2,1) = -400*x(1);
        H(2,2) = 200;
    endfunction

    // Initial parameters & bounds definition
    initial_parameters = [-1.2 1.0];
    lower_bounds = [-2 -2];
    upper_bounds = [2. 2.];

case 4
    // Rastrigin-like function - surface plot of the general definition
    clf()
    function [z] = myfunction_plot(x, y)
        z = x^2 + y^2 - cos(12*x) - cos(18*y);
    endfunction
    x = [-2:0.05:2]';
    y = x;
    z = feval(x, y, myfunction_plot);
    plot3d(x, y, z)

    // Rastrigin-like cost function
    function [f, G, ind] = myfunction(x, ind)
        f = x(1)^2 + x(2)^2 - cos(12*x(1)) - cos(18*x(2));
        // Gradient vector G
        G(1) = 2*x(1) + 12*sin(12*x(1));
        G(2) = 2*x(2) + 18*sin(18*x(2));
        // Hessian matrix.
        // Not returned: reserved for the future Newton cases (9-10).
        H(1,1) = 2 + 144*cos(12*x(1));
        H(1,2) = 0;
        H(2,1) = 0;
        H(2,2) = 2 + 324*cos(18*x(2));
    endfunction

    // Initial parameters & bounds definition
    initial_parameters = [1. 2.];
    lower_bounds = [0. 0.];
    upper_bounds = [2. 2.];
end
// ##########################################################################

// Adapter with the Nelder-Mead / fminsearch cost-function signature
// [f,index] = costf(x,index); delegates to myfunction with ind = 2 (f only).
// BUG FIX: the original passed myfunction (optim-style signature) directly to
// the Nelder-Mead solvers, whose second output slot would have received G.
function [f, index] = nm_cost(x, index)
    f = myfunction(x, 2);
endfunction

select CHOICE_SOLVER
case 1
    // Unbounded Nelder-Mead (fminsearch).
    // BUG FIX: the original overwrote initial_parameters with a hard-coded
    // value here, clobbering the function-specific starting point.
    optimized_parameters = fminsearch(nm_cost, initial_parameters);

case 2
    // Bounded Nelder-Mead via the neldermead component
    timer()
    nm = neldermead_new();                                       // initialization
    nm = neldermead_configure(nm, "-numberofvariables", 2);      // number of variables
    nm = neldermead_configure(nm, "-function", nm_cost);         // cost function to minimize
    nm = neldermead_configure(nm, "-x0", initial_parameters');   // BE CAREFUL: ' => column vector expected
    nm = neldermead_configure(nm, "-maxiter", 100);              // max number of iterations
    //nm = neldermead_configure(nm, "-method", "variable");      // unbounded variant (fminsearch is a particular case of Nelder-Mead)
    nm = neldermead_configure(nm, "-method", "box");             // box => bounded method
    nm = neldermead_configure(nm, "-boundsmin", lower_bounds);
    nm = neldermead_configure(nm, "-boundsmax", upper_bounds);
    nm = neldermead_search(nm);                                  // optimization is launched
    optimized_parameters = neldermead_get(nm, "-xopt");          // extract the results
    number_iterations = neldermead_get(nm, "-iterations");       // extract the number of iterations
    function_evaluations = neldermead_get(nm, "-funevals");      // extract the number of function evaluations
    CPU_ = timer();

case 3
    // optim solver, explicit gradient
    timer()
    nap = 100;    // max number of calls to the cost function
    iter = 100;   // max number of iterations
    epsg = %eps;  // stopping threshold on the gradient norm
    // BUG FIX: the original called optim with an undefined variable x0
    // instead of initial_parameters.
    [fopt, optimized_parameters, gradopt] = optim(myfunction, initial_parameters', "ar", nap, iter, epsg, imp = -1);
    CPU_ = timer();

case 4
    // TODO: optim + derivative macro (not implemented yet)
case 5
    // TODO: optim + numdiff macro (not implemented yet)
case 6
    // TODO: bounded optim + numdiff macro (not implemented yet)
case 7
    // TODO: qpsolve (not implemented yet)
end

printf("##################################################\n");
printf(" - X1 optimized = %g\n", optimized_parameters(1));
printf(" - X2 optimized = %g\n", optimized_parameters(2));
if (CHOICE_SOLVER == 2) then
    printf(" - Number of iteration(s) = %d\n", number_iterations);
    printf(" - Number of function evaluation(s) = %d\n", function_evaluations);
end
printf(" - CPU = %g\n", CPU_);
// BUG FIX: the original ended this string with \" which escaped the closing
// quote and broke the statement.
printf("##################################################\n");
// BUG FIX: "clear all" is MATLAB syntax; in Scilab it would try to clear a
// variable named "all". Plain "clear" removes all user variables.
clear