MultiProcessRunner: String replacement: stream_stdout->stream_output. · metacortex/tensorflow@e96a709 · GitHub
[go: up one dir, main page]

Skip to content

Commit e96a709

Browse files
rchao authored and tensorflower-gardener committed
MultiProcessRunner: String replacement: stream_stdout->stream_output.
PiperOrigin-RevId: 334165921 Change-Id: I523b80b211cc0517d6d1cc4fd9447e0509b0f441
1 parent e1dbc08 commit e96a709

File tree

2 files changed

+7
-7
lines changed

2 files changed

+7
-7
lines changed

tensorflow/python/distribute/multi_process_runner.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ def __init__(self,
128128
rpc_layer=None,
129129
max_run_time=None,
130130
grpc_fail_fast=None,
131-
stream_stdout=True,
131+
stream_output=True,
132132
list_stdout=False,
133133
use_dill_for_args=True,
134134
daemon=False,
@@ -160,7 +160,7 @@ def __init__(self,
160160
grpc_fail_fast: Whether GRPC connection between processes should fail
161161
without retrying. Defaults to None, in which case the environment
162162
variable is not explicitly set.
163-
stream_stdout: True if the output/error from the subprocesses should be
163+
stream_output: True if the output/error from the subprocesses should be
164164
streamed to be printed in parent process' log. Defaults to True.
165165
list_stdout: True if the output/error from the subprocesses should be
166166
collected to be attached to the resulting `MultiProcessRunnerResult`
@@ -203,7 +203,7 @@ def __init__(self,
203203
self._rpc_layer = rpc_layer or 'grpc'
204204
self._max_run_time = max_run_time
205205
self._grpc_fail_fast = grpc_fail_fast
206-
self._stream_stdout = stream_stdout
206+
self._stream_output = stream_output
207207
# TODO(rchao): Revisit list_stdout argument to consider other solution.
208208
self._list_stdout = list_stdout
209209
self._dependence_on_chief = dependence_on_chief
@@ -251,7 +251,7 @@ def _continuously_readline_from_sub(self, pipe_r, task_type, task_id):
251251
for line in reader:
252252
task_string = '[{}-{}]:'.format(task_type, task_id)
253253
formatted_line = '{} {}'.format(task_string.ljust(14), line)
254-
if self._stream_stdout:
254+
if self._stream_output:
255255
# TODO(rchao): Use a lock here to ensure the printed lines are not
256256
# broken.
257257
print(formatted_line, end='', flush=True)
@@ -1113,7 +1113,7 @@ def run(fn,
11131113
rpc_layer=None,
11141114
max_run_time=None,
11151115
grpc_fail_fast=None,
1116-
stream_stdout=True,
1116+
stream_output=True,
11171117
list_stdout=False,
11181118
timeout=_DEFAULT_TIMEOUT_SEC,
11191119
args=None,
@@ -1133,7 +1133,7 @@ def run(fn,
11331133
rpc_layer,
11341134
max_run_time=max_run_time,
11351135
grpc_fail_fast=grpc_fail_fast,
1136-
stream_stdout=stream_stdout,
1136+
stream_output=stream_output,
11371137
list_stdout=list_stdout,
11381138
args=args,
11391139
kwargs=kwargs)

tensorflow/python/distribute/multi_worker_test_base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -271,7 +271,7 @@ def start(self):
271271
self._cluster_spec,
272272
args=(self._start_events, self._finish_events),
273273
rpc_layer=self._rpc_layer,
274-
stream_stdout=False,
274+
stream_output=False,
275275
list_stdout=False,
276276
use_dill_for_args=False)
277277
self._mpr.start()

0 commit comments

Comments (0)