Merge pull request #5186 from sdvillal/issue5165 · scikit-learn/scikit-learn@eb6b654 · GitHub
[go: up one dir, main page]

Skip to content

Commit eb6b654

Browse files
committed
Merge pull request #5186 from sdvillal/issue5165
[MRG] ENH exposing extra parameters in t-sne
2 parents 9ec7ecd + 8bc47f2 commit eb6b654

File tree

1 file changed

+13
-0
lines changed

1 file changed

+13
-0
lines changed

sklearn/manifold/t_sne.py

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -338,6 +338,14 @@ class TSNE(BaseEstimator):
         Maximum number of iterations for the optimization. Should be at
         least 200.
 
+    n_iter_without_progress : int, optional (default: 30)
+        Maximum number of iterations without progress before we abort the
+        optimization.
+
+    min_grad_norm : float, optional (default: 1E-7)
+        If the gradient norm is below this threshold, the optimization will
+        be aborted.
+
     metric : string or callable, optional
         The metric to use when calculating distance between instances in a
         feature array. If metric is a string, it must be one of the options
@@ -395,6 +403,7 @@ class TSNE(BaseEstimator):
     """
     def __init__(self, n_components=2, perplexity=30.0,
                  early_exaggeration=4.0, learning_rate=1000.0, n_iter=1000,
+                 n_iter_without_progress=30, min_grad_norm=1e-7,
                  metric="euclidean", init="random", verbose=0,
                  random_state=None):
         if init not in ["pca", "random"]:
@@ -404,6 +413,8 @@ def __init__(self, n_components=2, perplexity=30.0,
         self.early_exaggeration = early_exaggeration
         self.learning_rate = learning_rate
         self.n_iter = n_iter
+        self.n_iter_without_progress = n_iter_without_progress
+        self.min_grad_norm = min_grad_norm
         self.metric = metric
         self.init = init
         self.verbose = verbose
@@ -504,6 +515,8 @@ def _tsne(self, P, alpha, n_samples, random_state, X_embedded=None):
         P /= self.early_exaggeration
         params, error, it = _gradient_descent(
             _kl_divergence, params, it=it + 1, n_iter=self.n_iter,
+            min_grad_norm=self.min_grad_norm,
+            n_iter_without_progress=self.n_iter_without_progress,
             momentum=0.8, learning_rate=self.learning_rate,
             verbose=self.verbose, args=[P, alpha, n_samples,
                                         self.n_components])

0 commit comments

Comments (0)