@article{glmnet,
   author   = {Friedman, Jerome and Hastie, Trevor and Tibshirani, Robert},
   title    = {Regularization Paths for Generalized Linear Models via Coordinate Descent},
   journal  = {Journal of Statistical Software},
   volume   = {33},
   number   = {1},
   year     = {2010},
   abstract = {We develop fast algorithms for estimation of generalized linear models with convex penalties. The models include linear regression, two-class logistic regression, and multinomial regression problems while the penalties include $\ell_1$ (the lasso), $\ell_2$ (ridge regression) and mixtures of the two (the elastic net). The algorithms use cyclical coordinate descent, computed along a regularization path. The methods can handle large problems and can also deal efficiently with sparse features. In comparative timings we find that the new algorithms are considerably faster than competing methods.},
   issn     = {1548-7660},
   pages    = {1--22},
   doi      = {10.18637/jss.v033.i01},
   url      = {https://www.jstatsoft.org/v033/i01},
}

@article{coxnet,
   author   = {Simon, Noah and Friedman, Jerome and Hastie, Trevor and Tibshirani, Robert},
   title    = {Regularization Paths for {Cox's} Proportional Hazards Model via Coordinate Descent},
   journal  = {Journal of Statistical Software},
   volume   = {39},
   number   = {5},
   year     = {2011},
   abstract = {We introduce a pathwise algorithm for the Cox proportional hazards model, regularized by convex combinations of $\ell_1$ and $\ell_2$ penalties (elastic net). Our algorithm fits via cyclical coordinate descent, and employs warm starts to find a solution along a regularization path. We demonstrate the efficacy of our algorithm on real and simulated data sets, and find considerable speedup between our algorithm and competing methods.},
   issn     = {1548-7660},
   pages    = {1--13},
   doi      = {10.18637/jss.v039.i05},
   url      = {https://www.jstatsoft.org/v039/i05},
}

@article{strongrules,
   author   = {Tibshirani, Robert and Bien, Jacob and Friedman, Jerome and Hastie, Trevor and Simon, Noah and Taylor, Jonathan and Tibshirani, Ryan},
   title    = {Strong Rules for Discarding Predictors in Lasso-Type Problems},
   journal  = {Journal of the Royal Statistical Society: Series B (Statistical Methodology)},
   volume   = {74},
   number   = {2},
   year     = {2012},
   pages    = {245--266},
   keywords = {Convex optimization, Lasso, l1-regularization, Screening, Sparsity},
   abstract = {We consider rules for discarding predictors in lasso regression and related problems, for computational efficiency. El Ghaoui and his colleagues have proposed `SAFE' rules, based on univariate inner products between each predictor and the outcome, which guarantee that a coefficient will be 0 in the solution vector. This provides a reduction in the number of variables that need to be entered into the optimization. We propose strong rules that are very simple and yet screen out far more predictors than the SAFE rules. This great practical improvement comes at a price: the strong rules are not foolproof and can mistakenly discard active predictors, i.e. predictors that have non-zero coefficients in the solution. We therefore combine them with simple checks of the Karush--Kuhn--Tucker conditions to ensure that the exact solution to the convex problem is delivered. Of course, any (approximate) screening method can be combined with the Karush--Kuhn--Tucker conditions to ensure the exact solution; the strength of the strong rules lies in the fact that, in practice, they discard a very large number of the inactive predictors and almost never commit mistakes. We also derive conditions under which they are foolproof. Strong rules provide substantial savings in computational time for a variety of statistical optimization problems.},
   doi      = {10.1111/j.1467-9868.2011.01004.x},
   url      = {https://rss.onlinelibrary.wiley.com/doi/abs/10.1111/j.1467-9868.2011.01004.x},
}

@misc{block,
   author        = {Simon, Noah and Friedman, Jerome and Hastie, Trevor},
   title         = {A Blockwise Descent Algorithm for Group-Penalized Multiresponse and Multinomial Regression},
   year          = {2013},
   eprint        = {1311.6529},
   archiveprefix = {arXiv},
}

@misc{best_subset,
   author        = {Hastie, Trevor and Tibshirani, Robert and Tibshirani, Ryan},
   title         = {Extended Comparisons of Best Subset Selection, Forward Stepwise Selection, and the Lasso},
   year          = {2017},
   eprint        = {1707.08692},
   archiveprefix = {arXiv},
}

@book{Therneau2000,
   author    = {Therneau, Terry M. and Grambsch, Patricia M.},
   title     = {Modeling Survival Data: Extending the {Cox} Model},
   publisher = {Springer},
   year      = {2000},
   isbn      = {9781441931610},
}