[R] Proximal Gradient Descent for Lasso


    This is a short R script for studying the proximal gradient descent algorithm, applied to the lasso problem.
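
    In LaTeX notation, the updates the script implements are the proximal gradient (soft-thresholding) step for the lasso objective, plus a Nesterov-style momentum step in the accelerated variant:

        f(x) = g(x) + h(x), \qquad
        g(x) = \tfrac{1}{2}\lVert Ax - b\rVert_2^2, \qquad
        h(x) = \lambda \lVert x \rVert_1

        x^{k+1} = \operatorname{prox}_{t\lambda\lVert\cdot\rVert_1}\!\bigl(x^k - t\,\nabla g(x^k)\bigr), \qquad
        \operatorname{prox}_{t\lambda\lVert\cdot\rVert_1}(v)_i = \operatorname{sign}(v_i)\,\max\bigl(\lvert v_i\rvert - t\lambda,\ 0\bigr)

        y^k = x^k + \frac{k-2}{k+1}\,\bigl(x^k - x^{k-1}\bigr), \qquad
        x^{k+1} = \operatorname{prox}_{t\lambda\lVert\cdot\rVert_1}\!\bigl(y^k - t\,\nabla g(y^k)\bigr)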

    #--------------------------- functions to be used -----------------------------------------
    ## the objective function f = g + h
    f <- function(x, A, b, lambda){
            1/2*norm(A %*% x - b, "2")^2 + lambda*sum(abs(x))
    }
    ## smooth part g
    g <- function(x, A, b){
            1/2*norm(A %*% x - b, "2")^2
    }
    ## gradient of the smooth part g
    grad_g <- function(x, A, b){
            t(A) %*% (A %*% x - b)
    }
    ## proximal operator of the non-smooth part h (soft-thresholding)
    prox_L1 <- function(x, lambda){
            sign(x) * pmax(abs(x) - lambda, 0)
    }

    #--------------------------------- generate data ------------------------------------------
    set.seed(1)             ## for reproducibility
    n <- 500                ## number of samples
    p <- 1000               ## number of features
    s <- 100                ## number of important features
    ## true coefficients: s nonzero entries followed by p - s zeros
    beta <- c(runif(s, -2, 2), rep(0, p - s))
    A <- matrix(rnorm(n*p), n, p)   ## n x p design matrix
    A <- scale(A, TRUE, TRUE)
    b <- A %*% beta + 0.1*rnorm(n)
    ## tuning parameter
    lambda <- 0.1*max(abs(t(A) %*% b))

    #--------------------------- proximal gradient descent ------------------------------------
    ## initialization
    step <- 1                ## step size
    beta0 <- 0.5             ## backtracking line-search shrinkage factor
    x <- rep(10, p)          ## initialization of x
    xprev <- x
    maxiter <- 200           ## maximum number of iterations
    ## objective function values per iteration
    opt_f <- rep(0, maxiter)
    for(i in 1:maxiter){
            cat(i, fill = TRUE)
            ## gradient of g at the current iterate
            grad_x <- grad_g(x, A, b)
            ## proximal gradient step with backtracking line search
            while(TRUE){
                    z <- prox_L1(x - step*grad_x, step*lambda)
                    if(g(z, A, b) <= g(x, A, b) + sum(grad_x * (z - x)) + 1/(2*step)*norm(z - x, "2")^2) break
                    step <- beta0*step
            }
            xprev <- x
            x <- z
            ## record the objective value and stop when it stabilizes
            opt_f[i] <- f(x, A, b, lambda)
            if(i > 1 && abs(opt_f[i] - opt_f[i-1]) < 1e-8) break
    }

    #--------------------- accelerated proximal gradient descent ------------------------------
    ## initialization
    step <- 1                ## step size
    beta0 <- 0.5             ## backtracking line-search shrinkage factor
    x <- rep(10, p)          ## initialization of x
    xprev <- x
    maxiter <- 200           ## maximum number of iterations
    ## objective function values per iteration
    opt_f_fast <- rep(0, maxiter)
    for(i in 1:maxiter){
            cat(i, fill = TRUE)
            ## momentum (extrapolation) step
            y <- x + (i-2)/(i+1)*(x - xprev)
            ## gradient of g at the extrapolated point
            grad_y <- grad_g(y, A, b)
            ## proximal gradient step with backtracking line search
            while(TRUE){
                    z <- prox_L1(y - step*grad_y, step*lambda)
                    if(g(z, A, b) <= g(y, A, b) + sum(grad_y * (z - y)) + 1/(2*step)*norm(z - y, "2")^2) break
                    step <- beta0*step
            }
            xprev <- x
            x <- z
            ## record the objective value and stop when it stabilizes
            opt_f_fast[i] <- f(x, A, b, lambda)
            if(i > 1 && abs(opt_f_fast[i] - opt_f_fast[i-1]) < 1e-8) break
    }

    #------------------------------------- plot -----------------------------------------------
    library(ggplot2)
    ## pad iterations after early convergence with the last objective value
    opt_f[opt_f == 0] <- tail(opt_f[opt_f != 0], 1)
    opt_f_fast[opt_f_fast == 0] <- tail(opt_f_fast[opt_f_fast != 0], 1)
    data <- data.frame(iter = seq_len(maxiter), opt_f = opt_f, opt_f_fast = opt_f_fast)
    ## reshape to long format with tidyr
    library(tidyr)
    test_data <- gather(data, variable, value, -iter)
    ggplot(test_data, aes(x = iter, y = value, color = variable)) +
            geom_line()
    ## equivalently, melt the data with reshape2
    library(reshape2)
    test_data <- melt(data, id = "iter")
    ggplot(test_data, aes(x = iter, y = value, color = variable)) +
            geom_line()
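
    As a quick standalone sanity check (a minimal sketch, separate from the script above, with illustrative values), prox_L1 should reproduce componentwise soft-thresholding on a small vector:

    ## soft-thresholding shrinks each entry toward zero by lambda
    prox_L1 <- function(x, lambda){
            sign(x) * pmax(abs(x) - lambda, 0)
    }
    print(prox_L1(c(-3, -0.5, 0, 0.5, 3), 1))
    ## [1] -2  0  0  0  2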

    [Figure: objective value versus iteration for proximal gradient descent (opt_f) and its accelerated variant (opt_f_fast), as produced by the ggplot2 calls above.]
