MAINT Replaces cnp.ndarray with memory views in _cd_fast (#25775) · scikit-learn/scikit-learn@63ff303

Commit 63ff303

MAINT Replaces cnp.ndarray with memory views in _cd_fast (#25775)
1 parent d226c9d commit 63ff303

File tree

2 files changed: +21, -25 lines

setup.py

Lines changed: 1 addition & 0 deletions
@@ -90,6 +90,7 @@
     "sklearn.ensemble._hist_gradient_boosting.common",
     "sklearn.ensemble._hist_gradient_boosting.utils",
     "sklearn.feature_extraction._hashing_fast",
+    "sklearn.linear_model._cd_fast",
     "sklearn.linear_model._sag_fast",
     "sklearn.linear_model._sgd_fast",
     "sklearn.manifold._barnes_hut_tsne",

sklearn/linear_model/_cd_fast.pyx

Lines changed: 20 additions & 25 deletions
@@ -90,13 +90,12 @@ cdef floating diff_abs_max(int n, floating* a, floating* b) noexcept nogil:
             m = d
     return m

-# TODO: use const fused typed memoryview where possible when Cython 0.29.33 is used.
 def enet_coordinate_descent(
-    cnp.ndarray[floating, ndim=1, mode='c'] w,
+    floating[::1] w,
     floating alpha,
     floating beta,
-    cnp.ndarray[floating, ndim=2, mode='fortran'] X,
-    cnp.ndarray[floating, ndim=1, mode='c'] y,
+    const floating[::1, :] X,
+    const floating[::1] y,
     unsigned int max_iter,
     floating tol,
     object rng,
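
The hunk above is the pattern the rest of the diff repeats for every solver in the file: the cnp.ndarray buffer declarations become typed memoryviews, with const added for arguments the function only reads, now that the minimum supported Cython release allows const fused-typed memoryviews (the point of the removed TODO comments). A toy sketch of the two spellings, not taken from the patch (function names and bodies are invented for illustration):

    from cython cimport floating   # fused type: float or double
    cimport numpy as cnp           # only needed for the old buffer syntax

    # Old spelling: ties the argument to a NumPy ndarray buffer.
    def dot_old(cnp.ndarray[floating, ndim=1, mode='c'] a,
                cnp.ndarray[floating, ndim=1, mode='c'] b):
        cdef Py_ssize_t i
        cdef floating s = 0
        for i in range(a.shape[0]):
            s += a[i] * b[i]
        return s

    # New spelling: a const fused-typed memoryview. Any object exposing a
    # contiguous buffer works, including read-only (non-writeable) arrays.
    def dot_new(const floating[::1] a, const floating[::1] b):
        cdef Py_ssize_t i
        cdef floating s = 0
        for i in range(a.shape[0]):
            s += a[i] * b[i]
        return s
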
@@ -273,17 +272,16 @@ def enet_coordinate_descent(
     return np.asarray(w), gap, tol, n_iter + 1


-# TODO: use const fused typed memoryview where possible when Cython 0.29.33 is used.
 def sparse_enet_coordinate_descent(
-    cnp.ndarray[floating, ndim=1, mode='c'] w,
+    floating[::1] w,
     floating alpha,
     floating beta,
-    cnp.ndarray[floating, ndim=1, mode='c'] X_data,
+    const floating[::1] X_data,
     const int[::1] X_indices,
     const int[::1] X_indptr,
-    cnp.ndarray[floating, ndim=1, mode='c'] y,
-    cnp.ndarray[floating, ndim=1, mode='c'] sample_weight,
-    cnp.ndarray[floating, ndim=1, mode='c'] X_mean,
+    const floating[::1] y,
+    const floating[::1] sample_weight,
+    const floating[::1] X_mean,
     unsigned int max_iter,
     floating tol,
     object rng,
@@ -340,7 +338,7 @@ def sparse_enet_coordinate_descent(
     # R = y - Zw, weighted version R = sample_weight * (y - Zw)
     cdef floating[::1] R
     cdef floating[::1] XtA
-    cdef floating[::1] yw
+    cdef const floating[::1] yw

     if floating is float:
         dtype = np.float32
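
The yw change above presumably follows from the new const signature: a local view that may be bound to the read-only y must itself carry the const qualifier, because Cython will not let a non-const memoryview alias const data. A hypothetical sketch of that constraint (names and body invented, not code from the patch):

    import numpy as np
    from cython cimport floating

    def total_target(const floating[::1] y, const floating[::1] sample_weight, bint weighted):
        # Must be const: one branch binds it straight to the read-only argument.
        # A plain `cdef floating[::1] yw` would fail to compile on `yw = y`.
        cdef const floating[::1] yw
        cdef Py_ssize_t i
        cdef floating total = 0
        if weighted:
            yw = np.multiply(sample_weight, y)  # fresh writable array, also fine behind const
        else:
            yw = y                              # aliases the const input
        for i in range(yw.shape[0]):
            total += yw[i]
        return total
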
@@ -565,14 +563,13 @@ def sparse_enet_coordinate_descent(
     return np.asarray(w), gap, tol, n_iter + 1


-# TODO: use const fused typed memoryview where possible when Cython 0.29.33 is used.
 def enet_coordinate_descent_gram(
-    cnp.ndarray[floating, ndim=1, mode='c'] w,
+    floating[::1] w,
     floating alpha,
     floating beta,
-    cnp.ndarray[floating, ndim=2, mode='c'] Q,
-    cnp.ndarray[floating, ndim=1, mode='c'] q,
-    cnp.ndarray[floating, ndim=1] y,
+    const floating[:, ::1] Q,
+    const floating[::1] q,
+    const floating[:] y,
     unsigned int max_iter,
     floating tol,
     object rng,
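
For reference, the memoryview layout spellings in these signatures correspond to the old mode=/ndim= buffer flags as follows; the toy function below only shows that the declarations compile side by side and is not part of the patch:

    from cython cimport floating

    # mode='c',       ndim=1  ->  floating[::1]      (contiguous 1-D)
    # mode='c',       ndim=2  ->  floating[:, ::1]   (C-contiguous, row-major)
    # mode='fortran', ndim=2  ->  floating[::1, :]   (Fortran-contiguous, column-major)
    # no mode,        ndim=1  ->  floating[:]        (any stride accepted)
    def corner_values(const floating[:, ::1] Q, const floating[::1, :] X, const floating[:] y):
        return Q[0, 0], X[0, 0], y[0]
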
@@ -633,8 +630,8 @@ def enet_coordinate_descent_gram(

     cdef floating y_norm2 = np.dot(y, y)
     cdef floating* w_ptr = &w[0]
-    cdef floating* Q_ptr = &Q[0, 0]
-    cdef floating* q_ptr = &q[0]
+    cdef const floating* Q_ptr = &Q[0, 0]
+    cdef const floating* q_ptr = &q[0]
     cdef floating* H_ptr = &H[0]
     cdef floating* XtA_ptr = &XtA[0]
     tol = tol * y_norm2
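
The pointer declarations change for the same reason: the address of an element of a const memoryview is a const floating*, and Cython refuses to discard the qualifier. A minimal sketch (illustrative only, not from the patch):

    from cython cimport floating

    def first_element(const floating[:, ::1] Q):
        # `cdef floating* Q_ptr = &Q[0, 0]` would not compile here, since it
        # would drop the const qualifier on the underlying data.
        cdef const floating* Q_ptr = &Q[0, 0]
        return Q_ptr[0]
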
@@ -736,14 +733,12 @@ def enet_coordinate_descent_gram(

     return np.asarray(w), gap, tol, n_iter + 1

-# TODO: use const fused typed memoryview where possible when Cython 0.29.33 is used.
 def enet_coordinate_descent_multi_task(
-    cnp.ndarray[floating, ndim=2, mode='fortran'] W,
+    const floating[::1, :] W,
     floating l1_reg,
     floating l2_reg,
-    # TODO: use const qualified fused-typed memoryview when Cython 3.0 is used.
-    cnp.ndarray[floating, ndim=2, mode='fortran'] X,
-    cnp.ndarray[floating, ndim=2, mode='fortran'] Y,
+    const floating[::1, :] X,
+    const floating[::1, :] Y,
     unsigned int max_iter,
     floating tol,
     object rng,
@@ -807,8 +802,8 @@ def enet_coordinate_descent_multi_task(
     cdef UINT32_t rand_r_state_seed = rng.randint(0, RAND_R_MAX)
     cdef UINT32_t* rand_r_state = &rand_r_state_seed

-    cdef floating* X_ptr = &X[0, 0]
-    cdef floating* Y_ptr = &Y[0, 0]
+    cdef const floating* X_ptr = &X[0, 0]
+    cdef const floating* Y_ptr = &Y[0, 0]

     if l1_reg == 0:
         warnings.warn("Coordinate descent with l1_reg=0 may lead to unexpected"
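
What callers gain from the const qualifiers, sketched with an invented helper rather than one of the real solvers: a const memoryview parameter accepts read-only NumPy arrays (for example memory-mapped input), whereas a non-const memoryview raises an error along the lines of "buffer source array is read-only" at call time.

    import numpy as np
    from cython cimport floating

    def column_sums(const floating[::1, :] X):
        cdef Py_ssize_t i, j
        cdef floating s
        if floating is float:
            dtype = np.float32
        else:
            dtype = np.float64
        out = np.zeros(X.shape[1], dtype=dtype)
        for j in range(X.shape[1]):
            s = 0
            for i in range(X.shape[0]):
                s += X[i, j]
            out[j] = s
        return out

    # Example call with a read-only, Fortran-ordered array:
    #   X = np.asfortranarray(np.arange(6, dtype=np.float64).reshape(2, 3))
    #   X.flags.writeable = False
    #   column_sums(X)   # accepted; a non-const floating[::1, :] parameter would reject X
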
