Question
Please fill the parts with "@":
clearvars
clc
addpath('../Generation')
addpath('../Basic_blocks')
addpath('../Algorithms')
% Loading scenarios
% ===========================
scenario = 2;
[data_reg, set_up] = scenarios_regression(scenario);
% Definition of the problem
% ===================================
% Ridge (L2-regularized least-squares) loss, gradient, and Hessian
loss_LS_L2 = @(N,A,x,y,lambda) (1/N*(A*x-y)'*(A*x-y) + lambda/2*(x'*x));
grad_LS_L2 = @(N,A,x,y,lambda) (2/N*A'*(A*x-y) + lambda*x);
Hess_LS_L2 = @(N,A,x,y,lambda) (2/N*(A'*A) + lambda*eye(length(x)));
% Instantaneous gradient: same expression without the 1/N averaging
grad_LS_L2_inst = @(N,A,x,y,lambda) (2*A'*(A*x-y) + lambda*x);
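These handles implement the ridge-regression objective; the gradient and Hessian follow directly by differentiating it:

$$L(x) = \frac{1}{N}(Ax-y)^\top(Ax-y) + \frac{\lambda}{2}x^\top x$$

$$\nabla L(x) = \frac{2}{N}A^\top(Ax-y) + \lambda x, \qquad \nabla^2 L(x) = \frac{2}{N}A^\top A + \lambda I$$

Note that the Hessian is constant and, for lambda > 0, positive definite, so the problem is strongly convex with a unique minimizer.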
% Different ways to solve the LS problem theoretically
% =========================================
% Solution of the empirical risk using CVX
x_L2_cvx = solver_cvx(set_up, @(N,A,x,y,lambda) loss_LS_L2(N,A,x,y,lambda));

% Closed-form ridge solution
x_opt = inv(2/set_up.Niter_train*set_up.Utrain(:,1:set_up.M+1)'*set_up.Utrain(:,1:set_up.M+1) ...
    + set_up.Lambda*eye(set_up.M+1)) ...
    * 2/set_up.Niter_train*set_up.Utrain(:,1:set_up.M+1)'*set_up.ytrain(:,1);

loss_opt = loss_LS_L2(set_up.Niter_train, set_up.Utrain(:,1:set_up.M+1), ...
    x_L2_cvx, set_up.ytrain(:,1), set_up.Lambda);

% Compare the CVX and closed-form solutions side by side
[x_L2_cvx x_opt]
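Setting the gradient to zero gives the closed-form solution computed as x_opt above. Numerically, inv() is best avoided; a minimal equivalent sketch using backslash, with A, y, N introduced here as shorthand for the same set_up fields:

A = set_up.Utrain(:, 1:set_up.M+1);   % training regressors
y = set_up.ytrain(:, 1);              % training targets
N = set_up.Niter_train;               % number of training samples
% Solve (2/N*A'*A + lambda*I)*x = 2/N*A'*y without forming an explicit inverse
x_opt_bs = (2/N*(A'*A) + set_up.Lambda*eye(set_up.M+1)) \ (2/N*(A'*y));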
% Gradient descent
out_gd = grad_FOM(set_up, @(N,A,x,y,lambda) grad_LS_L2(N,A,x,y,lambda));
loss_grad = eval_loss(out_gd, set_up, @(N,A,x,y,lambda) loss_LS_L2(N,A,x,y,lambda));
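grad_FOM is not listed here; as a first-order method it presumably iterates a fixed-step gradient update on the full training set. A minimal sketch of such a loop, reusing A, y, N from the snippet above (the step size mu and the trajectory layout are assumptions, not the course code):

x = zeros(set_up.M+1, 1);             % initial iterate
mu = 0.01;                            % step size (assumed)
out_gd_sketch = zeros(set_up.M+1, set_up.Niter_train);
for k = 1:set_up.Niter_train
    g = grad_LS_L2(N, A, x, y, set_up.Lambda);
    x = x - mu*g;                     % gradient step
    out_gd_sketch(:,k) = x;           % one column per iteration
end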
% Newton algorithm
out_hess = grad_SOM(set_up, @(N,A,x,y,lambda) grad_LS_L2(N,A,x,y,lambda), ...
    @(N,A,x,y,lambda) Hess_LS_L2(N,A,x,y,lambda));
loss_hess = eval_loss(out_hess, set_up, @(N,A,x,y,lambda) loss_LS_L2(N,A,x,y,lambda));
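grad_SOM takes both the gradient and the Hessian handle, which suggests a Newton iteration. A sketch under that assumption, again reusing A, y, N:

x = zeros(set_up.M+1, 1);
out_hess_sketch = zeros(set_up.M+1, set_up.Niter_train);
for k = 1:set_up.Niter_train
    g = grad_LS_L2(N, A, x, y, set_up.Lambda);
    H = Hess_LS_L2(N, A, x, y, set_up.Lambda);
    x = x - H \ g;                    % Newton step: solve H*dx = g
    out_hess_sketch(:,k) = x;
end

Because the ridge loss is quadratic, the Hessian is constant and a full Newton step lands on the optimum in a single iteration; the remaining iterations only confirm it.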
% BFGS algorithm
out_BFGS = BFGS(set_up, @(N,A,x,y,lambda) grad_LS_L2(N,A,x,y,lambda));
loss_BFGS = eval_loss(out_BFGS, set_up, @(N,A,x,y,lambda) loss_LS_L2(N,A,x,y,lambda));
hold off
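BFGS is likewise only referenced; the standard inverse-Hessian update it names is sketched below. The fixed step size alpha is an assumption (practical BFGS uses a line search), and t denotes the gradient difference to avoid clashing with the data vector y:

n = set_up.M + 1;
x = zeros(n, 1);
B = eye(n);                           % inverse-Hessian approximation
alpha = 0.01;                         % step size (assumed)
g = grad_LS_L2(N, A, x, y, set_up.Lambda);
out_BFGS_sketch = zeros(n, set_up.Niter_train);
for k = 1:set_up.Niter_train
    x_new = x - alpha*(B*g);          % quasi-Newton step
    g_new = grad_LS_L2(N, A, x_new, y, set_up.Lambda);
    s = x_new - x;  t = g_new - g;  rho = 1/(t'*s);
    B = (eye(n) - rho*(s*t'))*B*(eye(n) - rho*(t*s')) + rho*(s*s');
    x = x_new;  g = g_new;
    out_BFGS_sketch(:,k) = x;
end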
% Distributed ADMM algorithm
out_admm_dist = admm_ridge_dist(set_up);
% Pad out_admm_dist to Niter_train columns by repeating its final column
% (the distributed run produces Niter_train/Number_nodes iterates)
out_admm_dist2 = [out_admm_dist, kron(out_admm_dist(:,set_up.Niter_train/set_up.Number_nodes), ...
    ones(1, set_up.Niter_train - set_up.Niter_train/set_up.Number_nodes))];
loss_admm_dist = eval_loss(out_admm_dist2, set_up, @(N,A,x,y,lambda) loss_LS_L2(N,A,x,y,lambda));
plot(1:set_up.Niter_train, 10*log10(sum((loss_admm_dist - loss_opt*ones(1,set_up.Niter_train)).^2, 1)), 'k', 'LineWidth', 3)
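admm_ridge_dist is also external. For reference, here is a consensus-ADMM sketch of the distributed ridge problem it presumably solves: each node holds a slice of the data, solves a local regularized least-squares subproblem coupled to a shared variable z, and the nodes agree through the z- and dual updates. The penalty rho, the even data split, and all variable names below are assumptions:

m = set_up.Number_nodes;  rho = 1;  n = set_up.M + 1;
Nl = N/m;                             % samples per node (assumed even split)
X = zeros(n, m);  U = zeros(n, m);  z = zeros(n, 1);
Kd = set_up.Niter_train/set_up.Number_nodes;  % matches the padding above
out_admm_sketch = zeros(n, Kd);
for k = 1:Kd
    for i = 1:m
        Ai = A((i-1)*Nl+1:i*Nl, :);   % node i's data slice
        yi = y((i-1)*Nl+1:i*Nl);
        % local x-update: regularized least-squares subproblem at node i
        X(:,i) = (2/N*(Ai'*Ai) + rho*eye(n)) \ (2/N*(Ai'*yi) + rho*(z - U(:,i)));
    end
    z = rho*sum(X + U, 2) / (set_up.Lambda + m*rho);  % consensus z-update
    U = U + X - z*ones(1, m);                         % scaled dual updates
    out_admm_sketch(:,k) = z;                         % store consensus iterate
end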
figure(1)
% Plot of learning curves
plot(1:set_up.Niter_train, 10*log10(sum((loss_grad - loss_opt*ones(1,set_up.Niter_train)).^2, 1)), 'b', 'LineWidth', 3)
hold on
plot(1:set_up.Niter_train, 10*log10(sum((loss_hess - loss_opt*ones(1,set_up.Niter_train)).^2, 1)), 'r', 'LineWidth', 3)
plot(1:set_up.Niter_train, 10*log10(sum((loss_BFGS - loss_opt*ones(1,set_up.Niter_train)).^2, 1)), 'g', 'LineWidth', 3)
plot(1:set_up.Niter_train, 10*log10(sum((loss_admm_dist - loss_opt*ones(1,set_up.Niter_train)).^2, 1)), 'k', 'LineWidth', 3)
hold off
grid
xlabel('Iterations')
ylabel('MSE (dB)')
title('Ridge Algorithm')
legend('Gradient','Newton','BFGS','ADMM distributed')