diff --git a/sklearn/manifold/t_sne.py b/sklearn/manifold/t_sne.py
index 80b6003f2520d..22029f5cff7e1 100644
--- a/sklearn/manifold/t_sne.py
+++ b/sklearn/manifold/t_sne.py
@@ -338,6 +338,14 @@ class TSNE(BaseEstimator):
         Maximum number of iterations for the optimization. Should be at
         least 200.
 
+    n_iter_without_progress : int, optional (default: 30)
+        Maximum number of iterations without progress before we abort the
+        optimization.
+
+    min_grad_norm : float, optional (default: 1E-7)
+        If the gradient norm is below this threshold, the optimization will
+        be aborted.
+
     metric : string or callable, optional
         The metric to use when calculating distance between instances in a
         feature array. If metric is a string, it must be one of the options
@@ -395,6 +403,7 @@ class TSNE(BaseEstimator):
     """
     def __init__(self, n_components=2, perplexity=30.0,
                  early_exaggeration=4.0, learning_rate=1000.0, n_iter=1000,
+                 n_iter_without_progress=30, min_grad_norm=1e-7,
                  metric="euclidean", init="random", verbose=0,
                  random_state=None):
         if init not in ["pca", "random"]:
@@ -404,6 +413,8 @@ def __init__(self, n_components=2, perplexity=30.0,
         self.early_exaggeration = early_exaggeration
         self.learning_rate = learning_rate
         self.n_iter = n_iter
+        self.n_iter_without_progress = n_iter_without_progress
+        self.min_grad_norm = min_grad_norm
         self.metric = metric
         self.init = init
         self.verbose = verbose
@@ -504,6 +515,8 @@ def _tsne(self, P, alpha, n_samples, random_state, X_embedded=None):
         P /= self.early_exaggeration
         params, error, it = _gradient_descent(
             _kl_divergence, params, it=it + 1, n_iter=self.n_iter,
+            min_grad_norm=self.min_grad_norm,
+            n_iter_without_progress=self.n_iter_without_progress,
             momentum=0.8, learning_rate=self.learning_rate,
             verbose=self.verbose, args=[P, alpha, n_samples,
                                         self.n_components])
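
With this patch applied, the two new stopping criteria become regular constructor arguments of `TSNE` and are forwarded to `_gradient_descent`. Below is a minimal usage sketch, assuming the patched version is installed; the random data and the non-default values chosen for `n_iter_without_progress` and `min_grad_norm` are illustrative only, not part of the change itself.

```python
# Usage sketch for the new stopping-criterion parameters (hypothetical data).
import numpy as np
from sklearn.manifold import TSNE

# Illustrative input: 100 samples with 50 features.
X = np.random.RandomState(0).randn(100, 50)

# Loosen both criteria relative to the defaults added by this patch
# (n_iter_without_progress=30, min_grad_norm=1e-7) so the optimization
# is allowed to run longer before it aborts.
tsne = TSNE(n_components=2, perplexity=30.0,
            n_iter_without_progress=100, min_grad_norm=1e-8,
            random_state=0, verbose=1)
X_embedded = tsne.fit_transform(X)
print(X_embedded.shape)  # (100, 2)
```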