Timing comparison with scikit-learn for Lasso
Compare time to solve large scale Lasso problems with scikit-learn.
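skglm estimators follow the scikit-learn API (fit, coef_, and the same alpha / l1_ratio parameters), so the two libraries can be swapped in place and timed head to head. A minimal sketch of this drop-in usage, assuming toy data from sklearn.datasets.make_regression rather than the benchmark data used below:

from sklearn.datasets import make_regression
from sklearn.linear_model import Lasso as Lasso_sklearn
from skglm import Lasso

# Hypothetical toy problem, only to illustrate the shared estimator API.
X_toy, y_toy = make_regression(n_samples=100, n_features=300, random_state=0)
w_sk = Lasso_sklearn(alpha=1., fit_intercept=False).fit(X_toy, y_toy).coef_
w_us = Lasso(alpha=1., fit_intercept=False).fit(X_toy, y_toy).coef_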
import time
import warnings
import numpy as np
from numpy.linalg import norm
import matplotlib.pyplot as plt
from libsvmdata import fetch_libsvm
from sklearn.exceptions import ConvergenceWarning
from sklearn.linear_model import Lasso as Lasso_sklearn
from sklearn.linear_model import ElasticNet as Enet_sklearn
from skglm import Lasso, ElasticNet
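# ConvergenceWarning is silenced on purpose: the benchmark below deliberately
# caps max_iter at small values, so scikit-learn would warn on nearly every fit.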
warnings.filterwarnings('ignore', category=ConvergenceWarning)
def compute_obj(X, y, w, alpha, l1_ratio=1):
    """Compute the elastic net objective (the Lasso objective when l1_ratio=1)."""
    loss = norm(y - X @ w) ** 2 / (2 * len(y))
    penalty = (alpha * l1_ratio * np.sum(np.abs(w))
               + 0.5 * alpha * (1 - l1_ratio) * norm(w) ** 2)
    return loss + penalty
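# Both libraries minimize the objective evaluated by compute_obj:
#     ||y - X w||^2 / (2 * n_samples)
#     + alpha * l1_ratio * ||w||_1 + alpha * (1 - l1_ratio) * ||w||^2 / 2
# which is the Lasso objective for l1_ratio=1 and the elastic net one for l1_ratio=0.5.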
# news20.binary (~26.8 MB) is downloaded by libsvmdata on the first call and cached.
X, y = fetch_libsvm("news20.binary")
alpha = np.max(np.abs(X.T @ y)) / len(y) / 10
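# alpha_max = ||X^T y||_inf / n_samples is the smallest alpha for which the Lasso
# solution is identically zero; taking a tenth of it yields a sparse but
# non-trivial solution.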
dict_sklearn = {}
dict_sklearn["lasso"] = Lasso_sklearn(
alpha=alpha, fit_intercept=False, tol=1e-12)
dict_sklearn["enet"] = Enet_sklearn(
alpha=alpha, fit_intercept=False, tol=1e-12, l1_ratio=0.5)
dict_ours = {}
dict_ours["lasso"] = Lasso(
alpha=alpha, fit_intercept=False, tol=1e-12)
dict_ours["enet"] = ElasticNet(
alpha=alpha, fit_intercept=False, tol=1e-12, l1_ratio=0.5)
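# Both libraries use the same (alpha, l1_ratio) parameterization; tol is set very
# low so that the accuracy reached is governed by max_iter alone.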
models = ["lasso", "enet"]
fig, axarr = plt.subplots(2, 1, constrained_layout=True)
for ax, model, l1_ratio in zip(axarr, models, [1, 0.5]):
    pobj_dict = {}
    pobj_dict["sklearn"] = list()
    pobj_dict["us"] = list()
    time_dict = {}
    time_dict["sklearn"] = list()
    time_dict["us"] = list()

    # Fit once to high accuracy: this gives the reference objective value and
    # removes numba compilation time from the skglm timings below.
    dict_ours[model].max_iter = 10_000
    w_star = dict_ours[model].fit(X, y).coef_
    pobj_star = compute_obj(X, y, w_star, alpha, l1_ratio)

    # Time scikit-learn over a grid of iteration budgets.
    for n_iter_sklearn in np.unique(np.geomspace(1, 50, num=15).astype(int)):
        dict_sklearn[model].max_iter = n_iter_sklearn

        t_start = time.time()
        w_sklearn = dict_sklearn[model].fit(X, y).coef_
        time_dict["sklearn"].append(time.time() - t_start)
        pobj_dict["sklearn"].append(compute_obj(X, y, w_sklearn, alpha, l1_ratio))

    # Time skglm over increasing iteration budgets.
    for n_iter_us in range(1, 10):
        dict_ours[model].max_iter = n_iter_us

        t_start = time.time()
        w = dict_ours[model].fit(X, y).coef_
        time_dict["us"].append(time.time() - t_start)
        pobj_dict["us"].append(compute_obj(X, y, w, alpha, l1_ratio))

    ax.semilogy(
        time_dict["sklearn"], pobj_dict["sklearn"] - pobj_star, label='sklearn')
    ax.semilogy(
        time_dict["us"], pobj_dict["us"] - pobj_star, label='skglm')

    ax.set_ylim((1e-10, 1))
    ax.set_title(model)
    ax.legend()
    ax.set_ylabel("Objective suboptimality")
axarr[1].set_xlabel("Time (s)")
plt.show(block=False)
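As a quick numeric check, the last recorded point of each curve can be printed; a small sketch, keeping in mind that after the loop the dictionaries hold the values of the last model fitted (the elastic net):

# Final suboptimality and time for each solver, for the last model of the loop.
for solver, label in (("sklearn", "sklearn"), ("us", "skglm")):
    print(f"{label}: suboptimality {pobj_dict[solver][-1] - pobj_star:.2e} "
          f"in {time_dict[solver][-1]:.2f} s")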
Total running time of the script: (1 minute 39.889 seconds)