Only set rpc_servers when provided · Nik-Kras/llama-cpp-python@1e42468 · GitHub
[go: up one dir, main page]

Skip to content

Commit 1e42468

Browse files
committed
Only set rpc_servers when provided
1 parent ff88fcb commit 1e42468

File tree

1 file changed

+5
-1
lines changed

1 file changed

+5
-1
lines changed

llama_cpp/llama.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -222,7 +222,11 @@ def __init__(
222222
) # 0x7FFFFFFF is INT32 max, will be auto set to all layers
223223
self.model_params.split_mode = split_mode
224224
self.model_params.main_gpu = main_gpu
225-
self.model_params.rpc_servers = rpc_servers.encode('utf-8')
225+
if rpc_servers is not None:
226+
self.model_params.rpc_servers = rpc_servers.encode('utf-8')
227+
self._rpc_servers = rpc_servers
228+
else:
229+
self._rpc_servers = None
226230
self.tensor_split = tensor_split
227231
self._c_tensor_split = None
228232
if self.tensor_split is not None:

0 commit comments

Comments (0)